Show More
The requested changes are too big and content was truncated. Show full diff
@@ -1,148 +1,148 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.admin.admin |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Controller for Admin panel of Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 7, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | import logging |
|
30 | 30 | |
|
31 | 31 | from pylons import request, tmpl_context as c, url |
|
32 | 32 | from sqlalchemy.orm import joinedload |
|
33 | 33 | from whoosh.qparser.default import QueryParser |
|
34 | 34 | from whoosh.qparser.dateparse import DateParserPlugin |
|
35 | 35 | from whoosh import query |
|
36 | 36 | from sqlalchemy.sql.expression import or_, and_, func |
|
37 | 37 | |
|
38 | 38 | from kallithea.model.db import UserLog |
|
39 | 39 | from kallithea.lib.auth import LoginRequired, HasPermissionAllDecorator |
|
40 | 40 | from kallithea.lib.base import BaseController, render |
|
41 | 41 | from kallithea.lib.utils2 import safe_int, remove_prefix, remove_suffix |
|
42 | 42 | from kallithea.lib.indexers import JOURNAL_SCHEMA |
|
43 | 43 | from kallithea.lib.helpers import Page |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | def _journal_filter(user_log, search_term): |
|
50 | 50 | """ |
|
51 | 51 | Filters sqlalchemy user_log based on search_term with whoosh Query language |
|
52 | 52 | http://packages.python.org/Whoosh/querylang.html |
|
53 | 53 | |
|
54 | 54 | :param user_log: |
|
55 | 55 | :param search_term: |
|
56 | 56 | """ |
|
57 |
log.debug('Initial search term: %r' |
|
|
57 | log.debug('Initial search term: %r', search_term) | |
|
58 | 58 | qry = None |
|
59 | 59 | if search_term: |
|
60 | 60 | qp = QueryParser('repository', schema=JOURNAL_SCHEMA) |
|
61 | 61 | qp.add_plugin(DateParserPlugin()) |
|
62 | 62 | qry = qp.parse(unicode(search_term)) |
|
63 |
log.debug('Filtering using parsed query %r' |
|
|
63 | log.debug('Filtering using parsed query %r', qry) | |
|
64 | 64 | |
|
65 | 65 | def wildcard_handler(col, wc_term): |
|
66 | 66 | if wc_term.startswith('*') and not wc_term.endswith('*'): |
|
67 | 67 | #postfix == endswith |
|
68 | 68 | wc_term = remove_prefix(wc_term, prefix='*') |
|
69 | 69 | return func.lower(col).endswith(wc_term) |
|
70 | 70 | elif wc_term.startswith('*') and wc_term.endswith('*'): |
|
71 | 71 | #wildcard == ilike |
|
72 | 72 | wc_term = remove_prefix(wc_term, prefix='*') |
|
73 | 73 | wc_term = remove_suffix(wc_term, suffix='*') |
|
74 | 74 | return func.lower(col).contains(wc_term) |
|
75 | 75 | |
|
76 | 76 | def get_filterion(field, val, term): |
|
77 | 77 | |
|
78 | 78 | if field == 'repository': |
|
79 | 79 | field = getattr(UserLog, 'repository_name') |
|
80 | 80 | elif field == 'ip': |
|
81 | 81 | field = getattr(UserLog, 'user_ip') |
|
82 | 82 | elif field == 'date': |
|
83 | 83 | field = getattr(UserLog, 'action_date') |
|
84 | 84 | elif field == 'username': |
|
85 | 85 | field = getattr(UserLog, 'username') |
|
86 | 86 | else: |
|
87 | 87 | field = getattr(UserLog, field) |
|
88 |
log.debug('filter field: %s val=>%s' |
|
|
88 | log.debug('filter field: %s val=>%s', field, val) | |
|
89 | 89 | |
|
90 | 90 | #sql filtering |
|
91 | 91 | if isinstance(term, query.Wildcard): |
|
92 | 92 | return wildcard_handler(field, val) |
|
93 | 93 | elif isinstance(term, query.Prefix): |
|
94 | 94 | return func.lower(field).startswith(func.lower(val)) |
|
95 | 95 | elif isinstance(term, query.DateRange): |
|
96 | 96 | return and_(field >= val[0], field <= val[1]) |
|
97 | 97 | return func.lower(field) == func.lower(val) |
|
98 | 98 | |
|
99 | 99 | if isinstance(qry, (query.And, query.Term, query.Prefix, query.Wildcard, |
|
100 | 100 | query.DateRange)): |
|
101 | 101 | if not isinstance(qry, query.And): |
|
102 | 102 | qry = [qry] |
|
103 | 103 | for term in qry: |
|
104 | 104 | field = term.fieldname |
|
105 | 105 | val = (term.text if not isinstance(term, query.DateRange) |
|
106 | 106 | else [term.startdate, term.enddate]) |
|
107 | 107 | user_log = user_log.filter(get_filterion(field, val, term)) |
|
108 | 108 | elif isinstance(qry, query.Or): |
|
109 | 109 | filters = [] |
|
110 | 110 | for term in qry: |
|
111 | 111 | field = term.fieldname |
|
112 | 112 | val = (term.text if not isinstance(term, query.DateRange) |
|
113 | 113 | else [term.startdate, term.enddate]) |
|
114 | 114 | filters.append(get_filterion(field, val, term)) |
|
115 | 115 | user_log = user_log.filter(or_(*filters)) |
|
116 | 116 | |
|
117 | 117 | return user_log |
|
118 | 118 | |
|
119 | 119 | |
|
120 | 120 | class AdminController(BaseController): |
|
121 | 121 | |
|
122 | 122 | @LoginRequired() |
|
123 | 123 | def __before__(self): |
|
124 | 124 | super(AdminController, self).__before__() |
|
125 | 125 | |
|
126 | 126 | @HasPermissionAllDecorator('hg.admin') |
|
127 | 127 | def index(self): |
|
128 | 128 | users_log = UserLog.query()\ |
|
129 | 129 | .options(joinedload(UserLog.user))\ |
|
130 | 130 | .options(joinedload(UserLog.repository)) |
|
131 | 131 | |
|
132 | 132 | #FILTERING |
|
133 | 133 | c.search_term = request.GET.get('filter') |
|
134 | 134 | users_log = _journal_filter(users_log, c.search_term) |
|
135 | 135 | |
|
136 | 136 | users_log = users_log.order_by(UserLog.action_date.desc()) |
|
137 | 137 | |
|
138 | 138 | p = safe_int(request.GET.get('page', 1), 1) |
|
139 | 139 | |
|
140 | 140 | def url_generator(**kw): |
|
141 | 141 | return url.current(filter=c.search_term, **kw) |
|
142 | 142 | |
|
143 | 143 | c.users_log = Page(users_log, page=p, items_per_page=10, url=url_generator) |
|
144 | 144 | |
|
145 | 145 | if request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
146 | 146 | return render('admin/admin_log.html') |
|
147 | 147 | |
|
148 | 148 | return render('admin/admin.html') |
@@ -1,149 +1,149 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.admin.auth_settings |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | pluggable authentication controller for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Nov 26, 2010 |
|
23 | 23 | :author: akesterson |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | 27 | import formencode.htmlfill |
|
28 | 28 | import traceback |
|
29 | 29 | |
|
30 | 30 | from pylons import request, tmpl_context as c, url |
|
31 | 31 | from pylons.controllers.util import redirect |
|
32 | 32 | from pylons.i18n.translation import _ |
|
33 | 33 | |
|
34 | 34 | from kallithea.lib import helpers as h |
|
35 | 35 | from kallithea.lib.compat import formatted_json |
|
36 | 36 | from kallithea.lib.base import BaseController, render |
|
37 | 37 | from kallithea.lib.auth import LoginRequired, HasPermissionAllDecorator |
|
38 | 38 | from kallithea.lib import auth_modules |
|
39 | 39 | from kallithea.model.forms import AuthSettingsForm |
|
40 | 40 | from kallithea.model.db import Setting |
|
41 | 41 | from kallithea.model.meta import Session |
|
42 | 42 | |
|
43 | 43 | log = logging.getLogger(__name__) |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class AuthSettingsController(BaseController): |
|
47 | 47 | |
|
48 | 48 | @LoginRequired() |
|
49 | 49 | @HasPermissionAllDecorator('hg.admin') |
|
50 | 50 | def __before__(self): |
|
51 | 51 | super(AuthSettingsController, self).__before__() |
|
52 | 52 | |
|
53 | 53 | def __load_defaults(self): |
|
54 | 54 | c.available_plugins = [ |
|
55 | 55 | 'kallithea.lib.auth_modules.auth_internal', |
|
56 | 56 | 'kallithea.lib.auth_modules.auth_container', |
|
57 | 57 | 'kallithea.lib.auth_modules.auth_ldap', |
|
58 | 58 | 'kallithea.lib.auth_modules.auth_crowd', |
|
59 | 59 | 'kallithea.lib.auth_modules.auth_pam' |
|
60 | 60 | ] |
|
61 | 61 | c.enabled_plugins = Setting.get_auth_plugins() |
|
62 | 62 | |
|
63 | 63 | def __render(self, defaults, errors): |
|
64 | 64 | c.defaults = {} |
|
65 | 65 | c.plugin_settings = {} |
|
66 | 66 | c.plugin_shortnames = {} |
|
67 | 67 | |
|
68 | 68 | for module in c.enabled_plugins: |
|
69 | 69 | plugin = auth_modules.loadplugin(module) |
|
70 | 70 | plugin_name = plugin.name |
|
71 | 71 | c.plugin_shortnames[module] = plugin_name |
|
72 | 72 | c.plugin_settings[module] = plugin.plugin_settings() |
|
73 | 73 | for v in c.plugin_settings[module]: |
|
74 | 74 | fullname = ("auth_" + plugin_name + "_" + v["name"]) |
|
75 | 75 | if "default" in v: |
|
76 | 76 | c.defaults[fullname] = v["default"] |
|
77 | 77 | # Current values will be the default on the form, if there are any |
|
78 | 78 | setting = Setting.get_by_name(fullname) |
|
79 | 79 | if setting is not None: |
|
80 | 80 | c.defaults[fullname] = setting.app_settings_value |
|
81 | 81 | # we want to show , separated list of enabled plugins |
|
82 | 82 | c.defaults['auth_plugins'] = ','.join(c.enabled_plugins) |
|
83 | 83 | |
|
84 | 84 | if defaults: |
|
85 | 85 | c.defaults.update(defaults) |
|
86 | 86 | |
|
87 | 87 | log.debug(formatted_json(defaults)) |
|
88 | 88 | return formencode.htmlfill.render( |
|
89 | 89 | render('admin/auth/auth_settings.html'), |
|
90 | 90 | defaults=c.defaults, |
|
91 | 91 | errors=errors, |
|
92 | 92 | prefix_error=False, |
|
93 | 93 | encoding="UTF-8", |
|
94 | 94 | force_defaults=False) |
|
95 | 95 | |
|
96 | 96 | def index(self): |
|
97 | 97 | self.__load_defaults() |
|
98 | 98 | return self.__render(defaults=None, errors=None) |
|
99 | 99 | |
|
100 | 100 | def auth_settings(self): |
|
101 | 101 | """POST create and store auth settings""" |
|
102 | 102 | self.__load_defaults() |
|
103 | 103 | log.debug("POST Result: %s", formatted_json(dict(request.POST))) |
|
104 | 104 | |
|
105 | 105 | # First, parse only the plugin list (not the plugin settings). |
|
106 | 106 | _auth_plugins_validator = AuthSettingsForm([]).fields['auth_plugins'] |
|
107 | 107 | try: |
|
108 | 108 | new_enabled_plugins = _auth_plugins_validator.to_python(request.POST.get('auth_plugins')) |
|
109 | 109 | except formencode.Invalid: |
|
110 | 110 | # User provided an invalid plugin list. Just fall back to |
|
111 | 111 | # the list of currently enabled plugins. (We'll re-validate |
|
112 | 112 | # and show an error message to the user, below.) |
|
113 | 113 | pass |
|
114 | 114 | else: |
|
115 | 115 | # Hide plugins that the user has asked to be disabled, but |
|
116 | 116 | # do not show plugins that the user has asked to be enabled |
|
117 | 117 | # (yet), since that'll cause validation errors and/or wrong |
|
118 | 118 | # settings being applied (e.g. checkboxes being cleared), |
|
119 | 119 | # since the plugin settings will not be in the POST data. |
|
120 | 120 | c.enabled_plugins = [ p for p in c.enabled_plugins if p in new_enabled_plugins ] |
|
121 | 121 | |
|
122 | 122 | # Next, parse everything including plugin settings. |
|
123 | 123 | _form = AuthSettingsForm(c.enabled_plugins)() |
|
124 | 124 | |
|
125 | 125 | try: |
|
126 | 126 | form_result = _form.to_python(dict(request.POST)) |
|
127 | 127 | for k, v in form_result.items(): |
|
128 | 128 | if k == 'auth_plugins': |
|
129 | 129 | # we want to store it comma separated inside our settings |
|
130 | 130 | v = ','.join(v) |
|
131 |
log.debug("%s = %s" |
|
|
131 | log.debug("%s = %s", k, str(v)) | |
|
132 | 132 | setting = Setting.create_or_update(k, v) |
|
133 | 133 | Session().add(setting) |
|
134 | 134 | Session().commit() |
|
135 | 135 | h.flash(_('Auth settings updated successfully'), |
|
136 | 136 | category='success') |
|
137 | 137 | except formencode.Invalid as errors: |
|
138 | 138 | log.error(traceback.format_exc()) |
|
139 | 139 | e = errors.error_dict or {} |
|
140 | 140 | return self.__render( |
|
141 | 141 | defaults=errors.value, |
|
142 | 142 | errors=e, |
|
143 | 143 | ) |
|
144 | 144 | except Exception: |
|
145 | 145 | log.error(traceback.format_exc()) |
|
146 | 146 | h.flash(_('error occurred during update of auth settings'), |
|
147 | 147 | category='error') |
|
148 | 148 | |
|
149 | 149 | return redirect(url('auth_home')) |
@@ -1,293 +1,293 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.admin.gist |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | gist controller for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: May 9, 2013 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import time |
|
29 | 29 | import logging |
|
30 | 30 | import traceback |
|
31 | 31 | import formencode.htmlfill |
|
32 | 32 | |
|
33 | 33 | from pylons import request, response, tmpl_context as c, url |
|
34 | 34 | from pylons.controllers.util import redirect |
|
35 | 35 | from pylons.i18n.translation import _ |
|
36 | 36 | |
|
37 | 37 | from kallithea.model.forms import GistForm |
|
38 | 38 | from kallithea.model.gist import GistModel |
|
39 | 39 | from kallithea.model.meta import Session |
|
40 | 40 | from kallithea.model.db import Gist, User |
|
41 | 41 | from kallithea.lib import helpers as h |
|
42 | 42 | from kallithea.lib.base import BaseController, render |
|
43 | 43 | from kallithea.lib.auth import LoginRequired, NotAnonymous |
|
44 | 44 | from kallithea.lib.utils import jsonify |
|
45 | 45 | from kallithea.lib.utils2 import safe_int, time_to_datetime |
|
46 | 46 | from kallithea.lib.helpers import Page |
|
47 | 47 | from webob.exc import HTTPNotFound, HTTPForbidden |
|
48 | 48 | from sqlalchemy.sql.expression import or_ |
|
49 | 49 | from kallithea.lib.vcs.exceptions import VCSError, NodeNotChangedError |
|
50 | 50 | |
|
51 | 51 | log = logging.getLogger(__name__) |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | class GistsController(BaseController): |
|
55 | 55 | """REST Controller styled on the Atom Publishing Protocol""" |
|
56 | 56 | |
|
57 | 57 | def __load_defaults(self, extra_values=None): |
|
58 | 58 | c.lifetime_values = [ |
|
59 | 59 | (str(-1), _('Forever')), |
|
60 | 60 | (str(5), _('5 minutes')), |
|
61 | 61 | (str(60), _('1 hour')), |
|
62 | 62 | (str(60 * 24), _('1 day')), |
|
63 | 63 | (str(60 * 24 * 30), _('1 month')), |
|
64 | 64 | ] |
|
65 | 65 | if extra_values: |
|
66 | 66 | c.lifetime_values.append(extra_values) |
|
67 | 67 | c.lifetime_options = [(c.lifetime_values, _("Lifetime"))] |
|
68 | 68 | |
|
69 | 69 | @LoginRequired() |
|
70 | 70 | def index(self): |
|
71 | 71 | """GET /admin/gists: All items in the collection""" |
|
72 | 72 | # url('gists') |
|
73 | 73 | not_default_user = c.authuser.username != User.DEFAULT_USER |
|
74 | 74 | c.show_private = request.GET.get('private') and not_default_user |
|
75 | 75 | c.show_public = request.GET.get('public') and not_default_user |
|
76 | 76 | |
|
77 | 77 | gists = Gist().query()\ |
|
78 | 78 | .filter(or_(Gist.gist_expires == -1, Gist.gist_expires >= time.time()))\ |
|
79 | 79 | .order_by(Gist.created_on.desc()) |
|
80 | 80 | |
|
81 | 81 | # MY private |
|
82 | 82 | if c.show_private and not c.show_public: |
|
83 | 83 | gists = gists.filter(Gist.gist_type == Gist.GIST_PRIVATE)\ |
|
84 | 84 | .filter(Gist.gist_owner == c.authuser.user_id) |
|
85 | 85 | # MY public |
|
86 | 86 | elif c.show_public and not c.show_private: |
|
87 | 87 | gists = gists.filter(Gist.gist_type == Gist.GIST_PUBLIC)\ |
|
88 | 88 | .filter(Gist.gist_owner == c.authuser.user_id) |
|
89 | 89 | |
|
90 | 90 | # MY public+private |
|
91 | 91 | elif c.show_private and c.show_public: |
|
92 | 92 | gists = gists.filter(or_(Gist.gist_type == Gist.GIST_PUBLIC, |
|
93 | 93 | Gist.gist_type == Gist.GIST_PRIVATE))\ |
|
94 | 94 | .filter(Gist.gist_owner == c.authuser.user_id) |
|
95 | 95 | |
|
96 | 96 | # default show ALL public gists |
|
97 | 97 | if not c.show_public and not c.show_private: |
|
98 | 98 | gists = gists.filter(Gist.gist_type == Gist.GIST_PUBLIC) |
|
99 | 99 | |
|
100 | 100 | c.gists = gists |
|
101 | 101 | p = safe_int(request.GET.get('page', 1), 1) |
|
102 | 102 | c.gists_pager = Page(c.gists, page=p, items_per_page=10) |
|
103 | 103 | return render('admin/gists/index.html') |
|
104 | 104 | |
|
105 | 105 | @LoginRequired() |
|
106 | 106 | @NotAnonymous() |
|
107 | 107 | def create(self): |
|
108 | 108 | """POST /admin/gists: Create a new item""" |
|
109 | 109 | # url('gists') |
|
110 | 110 | self.__load_defaults() |
|
111 | 111 | gist_form = GistForm([x[0] for x in c.lifetime_values])() |
|
112 | 112 | try: |
|
113 | 113 | form_result = gist_form.to_python(dict(request.POST)) |
|
114 | 114 | #TODO: multiple files support, from the form |
|
115 | 115 | filename = form_result['filename'] or Gist.DEFAULT_FILENAME |
|
116 | 116 | nodes = { |
|
117 | 117 | filename: { |
|
118 | 118 | 'content': form_result['content'], |
|
119 | 119 | 'lexer': form_result['mimetype'] # None is autodetect |
|
120 | 120 | } |
|
121 | 121 | } |
|
122 | 122 | _public = form_result['public'] |
|
123 | 123 | gist_type = Gist.GIST_PUBLIC if _public else Gist.GIST_PRIVATE |
|
124 | 124 | gist = GistModel().create( |
|
125 | 125 | description=form_result['description'], |
|
126 | 126 | owner=c.authuser.user_id, |
|
127 | 127 | gist_mapping=nodes, |
|
128 | 128 | gist_type=gist_type, |
|
129 | 129 | lifetime=form_result['lifetime'] |
|
130 | 130 | ) |
|
131 | 131 | Session().commit() |
|
132 | 132 | new_gist_id = gist.gist_access_id |
|
133 | 133 | except formencode.Invalid as errors: |
|
134 | 134 | defaults = errors.value |
|
135 | 135 | |
|
136 | 136 | return formencode.htmlfill.render( |
|
137 | 137 | render('admin/gists/new.html'), |
|
138 | 138 | defaults=defaults, |
|
139 | 139 | errors=errors.error_dict or {}, |
|
140 | 140 | prefix_error=False, |
|
141 | 141 | encoding="UTF-8", |
|
142 | 142 | force_defaults=False) |
|
143 | 143 | |
|
144 | 144 | except Exception as e: |
|
145 | 145 | log.error(traceback.format_exc()) |
|
146 | 146 | h.flash(_('Error occurred during gist creation'), category='error') |
|
147 | 147 | return redirect(url('new_gist')) |
|
148 | 148 | return redirect(url('gist', gist_id=new_gist_id)) |
|
149 | 149 | |
|
150 | 150 | @LoginRequired() |
|
151 | 151 | @NotAnonymous() |
|
152 | 152 | def new(self, format='html'): |
|
153 | 153 | """GET /admin/gists/new: Form to create a new item""" |
|
154 | 154 | # url('new_gist') |
|
155 | 155 | self.__load_defaults() |
|
156 | 156 | return render('admin/gists/new.html') |
|
157 | 157 | |
|
158 | 158 | @LoginRequired() |
|
159 | 159 | @NotAnonymous() |
|
160 | 160 | def update(self, gist_id): |
|
161 | 161 | """PUT /admin/gists/gist_id: Update an existing item""" |
|
162 | 162 | # Forms posted to this method should contain a hidden field: |
|
163 | 163 | # <input type="hidden" name="_method" value="PUT" /> |
|
164 | 164 | # Or using helpers: |
|
165 | 165 | # h.form(url('gist', gist_id=ID), |
|
166 | 166 | # method='put') |
|
167 | 167 | # url('gist', gist_id=ID) |
|
168 | 168 | |
|
169 | 169 | @LoginRequired() |
|
170 | 170 | @NotAnonymous() |
|
171 | 171 | def delete(self, gist_id): |
|
172 | 172 | """DELETE /admin/gists/gist_id: Delete an existing item""" |
|
173 | 173 | # Forms posted to this method should contain a hidden field: |
|
174 | 174 | # <input type="hidden" name="_method" value="DELETE" /> |
|
175 | 175 | # Or using helpers: |
|
176 | 176 | # h.form(url('gist', gist_id=ID), |
|
177 | 177 | # method='delete') |
|
178 | 178 | # url('gist', gist_id=ID) |
|
179 | 179 | gist = GistModel().get_gist(gist_id) |
|
180 | 180 | owner = gist.gist_owner == c.authuser.user_id |
|
181 | 181 | if h.HasPermissionAny('hg.admin')() or owner: |
|
182 | 182 | GistModel().delete(gist) |
|
183 | 183 | Session().commit() |
|
184 | 184 | h.flash(_('Deleted gist %s') % gist.gist_access_id, category='success') |
|
185 | 185 | else: |
|
186 | 186 | raise HTTPForbidden() |
|
187 | 187 | |
|
188 | 188 | return redirect(url('gists')) |
|
189 | 189 | |
|
190 | 190 | @LoginRequired() |
|
191 | 191 | def show(self, gist_id, revision='tip', format='html', f_path=None): |
|
192 | 192 | """GET /admin/gists/gist_id: Show a specific item""" |
|
193 | 193 | # url('gist', gist_id=ID) |
|
194 | 194 | c.gist = Gist.get_or_404(gist_id) |
|
195 | 195 | |
|
196 | 196 | #check if this gist is not expired |
|
197 | 197 | if c.gist.gist_expires != -1: |
|
198 | 198 | if time.time() > c.gist.gist_expires: |
|
199 |
log.error('Gist expired at %s' |
|
|
200 |
|
|
|
199 | log.error('Gist expired at %s', | |
|
200 | time_to_datetime(c.gist.gist_expires)) | |
|
201 | 201 | raise HTTPNotFound() |
|
202 | 202 | try: |
|
203 | 203 | c.file_changeset, c.files = GistModel().get_gist_files(gist_id, |
|
204 | 204 | revision=revision) |
|
205 | 205 | except VCSError: |
|
206 | 206 | log.error(traceback.format_exc()) |
|
207 | 207 | raise HTTPNotFound() |
|
208 | 208 | if format == 'raw': |
|
209 | 209 | content = '\n\n'.join([f.content for f in c.files if (f_path is None or f.path == f_path)]) |
|
210 | 210 | response.content_type = 'text/plain' |
|
211 | 211 | return content |
|
212 | 212 | return render('admin/gists/show.html') |
|
213 | 213 | |
|
214 | 214 | @LoginRequired() |
|
215 | 215 | @NotAnonymous() |
|
216 | 216 | def edit(self, gist_id, format='html'): |
|
217 | 217 | """GET /admin/gists/gist_id/edit: Form to edit an existing item""" |
|
218 | 218 | # url('edit_gist', gist_id=ID) |
|
219 | 219 | c.gist = Gist.get_or_404(gist_id) |
|
220 | 220 | |
|
221 | 221 | #check if this gist is not expired |
|
222 | 222 | if c.gist.gist_expires != -1: |
|
223 | 223 | if time.time() > c.gist.gist_expires: |
|
224 |
log.error('Gist expired at %s' |
|
|
225 |
|
|
|
224 | log.error('Gist expired at %s', | |
|
225 | time_to_datetime(c.gist.gist_expires)) | |
|
226 | 226 | raise HTTPNotFound() |
|
227 | 227 | try: |
|
228 | 228 | c.file_changeset, c.files = GistModel().get_gist_files(gist_id) |
|
229 | 229 | except VCSError: |
|
230 | 230 | log.error(traceback.format_exc()) |
|
231 | 231 | raise HTTPNotFound() |
|
232 | 232 | |
|
233 | 233 | self.__load_defaults(extra_values=('0', _('Unmodified'))) |
|
234 | 234 | rendered = render('admin/gists/edit.html') |
|
235 | 235 | |
|
236 | 236 | if request.POST: |
|
237 | 237 | rpost = request.POST |
|
238 | 238 | nodes = {} |
|
239 | 239 | for org_filename, filename, mimetype, content in zip( |
|
240 | 240 | rpost.getall('org_files'), |
|
241 | 241 | rpost.getall('files'), |
|
242 | 242 | rpost.getall('mimetypes'), |
|
243 | 243 | rpost.getall('contents')): |
|
244 | 244 | |
|
245 | 245 | nodes[org_filename] = { |
|
246 | 246 | 'org_filename': org_filename, |
|
247 | 247 | 'filename': filename, |
|
248 | 248 | 'content': content, |
|
249 | 249 | 'lexer': mimetype, |
|
250 | 250 | } |
|
251 | 251 | try: |
|
252 | 252 | GistModel().update( |
|
253 | 253 | gist=c.gist, |
|
254 | 254 | description=rpost['description'], |
|
255 | 255 | owner=c.gist.owner, |
|
256 | 256 | gist_mapping=nodes, |
|
257 | 257 | gist_type=c.gist.gist_type, |
|
258 | 258 | lifetime=rpost['lifetime'] |
|
259 | 259 | ) |
|
260 | 260 | |
|
261 | 261 | Session().commit() |
|
262 | 262 | h.flash(_('Successfully updated gist content'), category='success') |
|
263 | 263 | except NodeNotChangedError: |
|
264 | 264 | # raised if nothing was changed in repo itself. We anyway then |
|
265 | 265 | # store only DB stuff for gist |
|
266 | 266 | Session().commit() |
|
267 | 267 | h.flash(_('Successfully updated gist data'), category='success') |
|
268 | 268 | except Exception: |
|
269 | 269 | log.error(traceback.format_exc()) |
|
270 | 270 | h.flash(_('Error occurred during update of gist %s') % gist_id, |
|
271 | 271 | category='error') |
|
272 | 272 | |
|
273 | 273 | return redirect(url('gist', gist_id=gist_id)) |
|
274 | 274 | |
|
275 | 275 | return rendered |
|
276 | 276 | |
|
277 | 277 | @LoginRequired() |
|
278 | 278 | @NotAnonymous() |
|
279 | 279 | @jsonify |
|
280 | 280 | def check_revision(self, gist_id): |
|
281 | 281 | c.gist = Gist.get_or_404(gist_id) |
|
282 | 282 | last_rev = c.gist.scm_instance.get_changeset() |
|
283 | 283 | success = True |
|
284 | 284 | revision = request.POST.get('revision') |
|
285 | 285 | |
|
286 | 286 | ##TODO: maybe move this to model ? |
|
287 | 287 | if revision != last_rev.raw_id: |
|
288 | log.error('Last revision %s is different than submitted %s' | |
|
289 |
|
|
|
288 | log.error('Last revision %s is different than submitted %s', | |
|
289 | revision, last_rev) | |
|
290 | 290 | # our gist has newer version than we |
|
291 | 291 | success = False |
|
292 | 292 | |
|
293 | 293 | return {'success': success} |
@@ -1,525 +1,525 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.admin.settings |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | settings controller for Kallithea admin |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Jul 14, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import logging |
|
29 | 29 | import traceback |
|
30 | 30 | import formencode |
|
31 | 31 | |
|
32 | 32 | from formencode import htmlfill |
|
33 | 33 | from pylons import request, tmpl_context as c, url, config |
|
34 | 34 | from pylons.controllers.util import redirect |
|
35 | 35 | from pylons.i18n.translation import _ |
|
36 | 36 | |
|
37 | 37 | from kallithea.lib import helpers as h |
|
38 | 38 | from kallithea.lib.auth import LoginRequired, HasPermissionAllDecorator |
|
39 | 39 | from kallithea.lib.base import BaseController, render |
|
40 | 40 | from kallithea.lib.celerylib import tasks, run_task |
|
41 | 41 | from kallithea.lib.exceptions import HgsubversionImportError |
|
42 | 42 | from kallithea.lib.utils import repo2db_mapper, set_app_settings |
|
43 | 43 | from kallithea.model.db import Ui, Repository, Setting |
|
44 | 44 | from kallithea.model.forms import ApplicationSettingsForm, \ |
|
45 | 45 | ApplicationUiSettingsForm, ApplicationVisualisationForm |
|
46 | 46 | from kallithea.model.scm import ScmModel |
|
47 | 47 | from kallithea.model.notification import EmailNotificationModel |
|
48 | 48 | from kallithea.model.meta import Session |
|
49 | 49 | from kallithea.lib.utils2 import str2bool, safe_unicode |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class SettingsController(BaseController): |
|
54 | 54 | """REST Controller styled on the Atom Publishing Protocol""" |
|
55 | 55 | # To properly map this controller, ensure your config/routing.py |
|
56 | 56 | # file has a resource setup: |
|
57 | 57 | # map.resource('setting', 'settings', controller='admin/settings', |
|
58 | 58 | # path_prefix='/admin', name_prefix='admin_') |
|
59 | 59 | |
|
60 | 60 | @LoginRequired() |
|
61 | 61 | def __before__(self): |
|
62 | 62 | super(SettingsController, self).__before__() |
|
63 | 63 | |
|
64 | 64 | def _get_hg_ui_settings(self): |
|
65 | 65 | ret = Ui.query().all() |
|
66 | 66 | |
|
67 | 67 | if not ret: |
|
68 | 68 | raise Exception('Could not get application ui settings !') |
|
69 | 69 | settings = {} |
|
70 | 70 | for each in ret: |
|
71 | 71 | k = each.ui_key |
|
72 | 72 | v = each.ui_value |
|
73 | 73 | if k == '/': |
|
74 | 74 | k = 'root_path' |
|
75 | 75 | |
|
76 | 76 | if k == 'push_ssl': |
|
77 | 77 | v = str2bool(v) |
|
78 | 78 | |
|
79 | 79 | if k.find('.') != -1: |
|
80 | 80 | k = k.replace('.', '_') |
|
81 | 81 | |
|
82 | 82 | if each.ui_section in ['hooks', 'extensions']: |
|
83 | 83 | v = each.ui_active |
|
84 | 84 | |
|
85 | 85 | settings[each.ui_section + '_' + k] = v |
|
86 | 86 | return settings |
|
87 | 87 | |
|
88 | 88 | @HasPermissionAllDecorator('hg.admin') |
|
89 | 89 | def settings_vcs(self): |
|
90 | 90 | """GET /admin/settings: All items in the collection""" |
|
91 | 91 | # url('admin_settings') |
|
92 | 92 | c.active = 'vcs' |
|
93 | 93 | if request.POST: |
|
94 | 94 | application_form = ApplicationUiSettingsForm()() |
|
95 | 95 | try: |
|
96 | 96 | form_result = application_form.to_python(dict(request.POST)) |
|
97 | 97 | except formencode.Invalid as errors: |
|
98 | 98 | return htmlfill.render( |
|
99 | 99 | render('admin/settings/settings.html'), |
|
100 | 100 | defaults=errors.value, |
|
101 | 101 | errors=errors.error_dict or {}, |
|
102 | 102 | prefix_error=False, |
|
103 | 103 | encoding="UTF-8", |
|
104 | 104 | force_defaults=False) |
|
105 | 105 | |
|
106 | 106 | try: |
|
107 | 107 | sett = Ui.get_by_key('push_ssl') |
|
108 | 108 | sett.ui_value = form_result['web_push_ssl'] |
|
109 | 109 | Session().add(sett) |
|
110 | 110 | if c.visual.allow_repo_location_change: |
|
111 | 111 | sett = Ui.get_by_key('/') |
|
112 | 112 | sett.ui_value = form_result['paths_root_path'] |
|
113 | 113 | Session().add(sett) |
|
114 | 114 | |
|
115 | 115 | #HOOKS |
|
116 | 116 | sett = Ui.get_by_key(Ui.HOOK_UPDATE) |
|
117 | 117 | sett.ui_active = form_result['hooks_changegroup_update'] |
|
118 | 118 | Session().add(sett) |
|
119 | 119 | |
|
120 | 120 | sett = Ui.get_by_key(Ui.HOOK_REPO_SIZE) |
|
121 | 121 | sett.ui_active = form_result['hooks_changegroup_repo_size'] |
|
122 | 122 | Session().add(sett) |
|
123 | 123 | |
|
124 | 124 | sett = Ui.get_by_key(Ui.HOOK_PUSH) |
|
125 | 125 | sett.ui_active = form_result['hooks_changegroup_push_logger'] |
|
126 | 126 | Session().add(sett) |
|
127 | 127 | |
|
128 | 128 | sett = Ui.get_by_key(Ui.HOOK_PULL) |
|
129 | 129 | sett.ui_active = form_result['hooks_outgoing_pull_logger'] |
|
130 | 130 | |
|
131 | 131 | Session().add(sett) |
|
132 | 132 | |
|
133 | 133 | ## EXTENSIONS |
|
134 | 134 | sett = Ui.get_by_key('largefiles') |
|
135 | 135 | if not sett: |
|
136 | 136 | #make one if it's not there ! |
|
137 | 137 | sett = Ui() |
|
138 | 138 | sett.ui_key = 'largefiles' |
|
139 | 139 | sett.ui_section = 'extensions' |
|
140 | 140 | sett.ui_active = form_result['extensions_largefiles'] |
|
141 | 141 | Session().add(sett) |
|
142 | 142 | |
|
143 | 143 | sett = Ui.get_by_key('hgsubversion') |
|
144 | 144 | if not sett: |
|
145 | 145 | #make one if it's not there ! |
|
146 | 146 | sett = Ui() |
|
147 | 147 | sett.ui_key = 'hgsubversion' |
|
148 | 148 | sett.ui_section = 'extensions' |
|
149 | 149 | |
|
150 | 150 | sett.ui_active = form_result['extensions_hgsubversion'] |
|
151 | 151 | if sett.ui_active: |
|
152 | 152 | try: |
|
153 | 153 | import hgsubversion # pragma: no cover |
|
154 | 154 | except ImportError: |
|
155 | 155 | raise HgsubversionImportError |
|
156 | 156 | Session().add(sett) |
|
157 | 157 | |
|
158 | 158 | # sett = Ui.get_by_key('hggit') |
|
159 | 159 | # if not sett: |
|
160 | 160 | # #make one if it's not there ! |
|
161 | 161 | # sett = Ui() |
|
162 | 162 | # sett.ui_key = 'hggit' |
|
163 | 163 | # sett.ui_section = 'extensions' |
|
164 | 164 | # |
|
165 | 165 | # sett.ui_active = form_result['extensions_hggit'] |
|
166 | 166 | # Session().add(sett) |
|
167 | 167 | |
|
168 | 168 | Session().commit() |
|
169 | 169 | |
|
170 | 170 | h.flash(_('Updated VCS settings'), category='success') |
|
171 | 171 | |
|
172 | 172 | except HgsubversionImportError: |
|
173 | 173 | log.error(traceback.format_exc()) |
|
174 | 174 | h.flash(_('Unable to activate hgsubversion support. ' |
|
175 | 175 | 'The "hgsubversion" library is missing'), |
|
176 | 176 | category='error') |
|
177 | 177 | |
|
178 | 178 | except Exception: |
|
179 | 179 | log.error(traceback.format_exc()) |
|
180 | 180 | h.flash(_('Error occurred during updating ' |
|
181 | 181 | 'application settings'), category='error') |
|
182 | 182 | |
|
183 | 183 | defaults = Setting.get_app_settings() |
|
184 | 184 | defaults.update(self._get_hg_ui_settings()) |
|
185 | 185 | |
|
186 | 186 | return htmlfill.render( |
|
187 | 187 | render('admin/settings/settings.html'), |
|
188 | 188 | defaults=defaults, |
|
189 | 189 | encoding="UTF-8", |
|
190 | 190 | force_defaults=False) |
|
191 | 191 | |
|
192 | 192 | @HasPermissionAllDecorator('hg.admin') |
|
193 | 193 | def settings_mapping(self): |
|
194 | 194 | """GET /admin/settings/mapping: All items in the collection""" |
|
195 | 195 | # url('admin_settings_mapping') |
|
196 | 196 | c.active = 'mapping' |
|
197 | 197 | if request.POST: |
|
198 | 198 | rm_obsolete = request.POST.get('destroy', False) |
|
199 | 199 | install_git_hooks = request.POST.get('hooks', False) |
|
200 | 200 | invalidate_cache = request.POST.get('invalidate', False) |
|
201 | 201 | log.debug('rescanning repo location with destroy obsolete=%s and ' |
|
202 | 202 | 'install git hooks=%s' % (rm_obsolete,install_git_hooks)) |
|
203 | 203 | |
|
204 | 204 | if invalidate_cache: |
|
205 | 205 | log.debug('invalidating all repositories cache') |
|
206 | 206 | for repo in Repository.get_all(): |
|
207 | 207 | ScmModel().mark_for_invalidation(repo.repo_name, delete=True) |
|
208 | 208 | |
|
209 | 209 | filesystem_repos = ScmModel().repo_scan() |
|
210 | 210 | added, removed = repo2db_mapper(filesystem_repos, rm_obsolete, |
|
211 | 211 | install_git_hook=install_git_hooks, |
|
212 | 212 | user=c.authuser.username) |
|
213 | 213 | h.flash(h.literal(_('Repositories successfully rescanned. Added: %s. Removed: %s.') % |
|
214 | 214 | (', '.join(h.link_to(safe_unicode(repo_name), h.url('summary_home', repo_name=repo_name)) |
|
215 | 215 | for repo_name in added) or '-', |
|
216 | 216 | ', '.join(h.escape(safe_unicode(repo_name)) for repo_name in removed) or '-')), |
|
217 | 217 | category='success') |
|
218 | 218 | return redirect(url('admin_settings_mapping')) |
|
219 | 219 | |
|
220 | 220 | defaults = Setting.get_app_settings() |
|
221 | 221 | defaults.update(self._get_hg_ui_settings()) |
|
222 | 222 | |
|
223 | 223 | return htmlfill.render( |
|
224 | 224 | render('admin/settings/settings.html'), |
|
225 | 225 | defaults=defaults, |
|
226 | 226 | encoding="UTF-8", |
|
227 | 227 | force_defaults=False) |
|
228 | 228 | |
|
229 | 229 | @HasPermissionAllDecorator('hg.admin') |
|
230 | 230 | def settings_global(self): |
|
231 | 231 | """GET /admin/settings/global: All items in the collection""" |
|
232 | 232 | # url('admin_settings_global') |
|
233 | 233 | c.active = 'global' |
|
234 | 234 | if request.POST: |
|
235 | 235 | application_form = ApplicationSettingsForm()() |
|
236 | 236 | try: |
|
237 | 237 | form_result = application_form.to_python(dict(request.POST)) |
|
238 | 238 | except formencode.Invalid as errors: |
|
239 | 239 | return htmlfill.render( |
|
240 | 240 | render('admin/settings/settings.html'), |
|
241 | 241 | defaults=errors.value, |
|
242 | 242 | errors=errors.error_dict or {}, |
|
243 | 243 | prefix_error=False, |
|
244 | 244 | encoding="UTF-8", |
|
245 | 245 | force_defaults=False) |
|
246 | 246 | |
|
247 | 247 | try: |
|
248 | 248 | sett1 = Setting.create_or_update('title', |
|
249 | 249 | form_result['title']) |
|
250 | 250 | Session().add(sett1) |
|
251 | 251 | |
|
252 | 252 | sett2 = Setting.create_or_update('realm', |
|
253 | 253 | form_result['realm']) |
|
254 | 254 | Session().add(sett2) |
|
255 | 255 | |
|
256 | 256 | sett3 = Setting.create_or_update('ga_code', |
|
257 | 257 | form_result['ga_code']) |
|
258 | 258 | Session().add(sett3) |
|
259 | 259 | |
|
260 | 260 | sett4 = Setting.create_or_update('captcha_public_key', |
|
261 | 261 | form_result['captcha_public_key']) |
|
262 | 262 | Session().add(sett4) |
|
263 | 263 | |
|
264 | 264 | sett5 = Setting.create_or_update('captcha_private_key', |
|
265 | 265 | form_result['captcha_private_key']) |
|
266 | 266 | Session().add(sett5) |
|
267 | 267 | |
|
268 | 268 | Session().commit() |
|
269 | 269 | set_app_settings(config) |
|
270 | 270 | h.flash(_('Updated application settings'), category='success') |
|
271 | 271 | |
|
272 | 272 | except Exception: |
|
273 | 273 | log.error(traceback.format_exc()) |
|
274 | 274 | h.flash(_('Error occurred during updating ' |
|
275 | 275 | 'application settings'), |
|
276 | 276 | category='error') |
|
277 | 277 | |
|
278 | 278 | return redirect(url('admin_settings_global')) |
|
279 | 279 | |
|
280 | 280 | defaults = Setting.get_app_settings() |
|
281 | 281 | defaults.update(self._get_hg_ui_settings()) |
|
282 | 282 | |
|
283 | 283 | return htmlfill.render( |
|
284 | 284 | render('admin/settings/settings.html'), |
|
285 | 285 | defaults=defaults, |
|
286 | 286 | encoding="UTF-8", |
|
287 | 287 | force_defaults=False) |
|
288 | 288 | |
|
289 | 289 | @HasPermissionAllDecorator('hg.admin') |
|
290 | 290 | def settings_visual(self): |
|
291 | 291 | """GET /admin/settings/visual: All items in the collection""" |
|
292 | 292 | # url('admin_settings_visual') |
|
293 | 293 | c.active = 'visual' |
|
294 | 294 | if request.POST: |
|
295 | 295 | application_form = ApplicationVisualisationForm()() |
|
296 | 296 | try: |
|
297 | 297 | form_result = application_form.to_python(dict(request.POST)) |
|
298 | 298 | except formencode.Invalid as errors: |
|
299 | 299 | return htmlfill.render( |
|
300 | 300 | render('admin/settings/settings.html'), |
|
301 | 301 | defaults=errors.value, |
|
302 | 302 | errors=errors.error_dict or {}, |
|
303 | 303 | prefix_error=False, |
|
304 | 304 | encoding="UTF-8", |
|
305 | 305 | force_defaults=False) |
|
306 | 306 | |
|
307 | 307 | try: |
|
308 | 308 | settings = [ |
|
309 | 309 | ('show_public_icon', 'show_public_icon', 'bool'), |
|
310 | 310 | ('show_private_icon', 'show_private_icon', 'bool'), |
|
311 | 311 | ('stylify_metatags', 'stylify_metatags', 'bool'), |
|
312 | 312 | ('repository_fields', 'repository_fields', 'bool'), |
|
313 | 313 | ('dashboard_items', 'dashboard_items', 'int'), |
|
314 | 314 | ('admin_grid_items', 'admin_grid_items', 'int'), |
|
315 | 315 | ('show_version', 'show_version', 'bool'), |
|
316 | 316 | ('use_gravatar', 'use_gravatar', 'bool'), |
|
317 | 317 | ('gravatar_url', 'gravatar_url', 'unicode'), |
|
318 | 318 | ('clone_uri_tmpl', 'clone_uri_tmpl', 'unicode'), |
|
319 | 319 | ] |
|
320 | 320 | for setting, form_key, type_ in settings: |
|
321 | 321 | sett = Setting.create_or_update(setting, |
|
322 | 322 | form_result[form_key], type_) |
|
323 | 323 | Session().add(sett) |
|
324 | 324 | |
|
325 | 325 | Session().commit() |
|
326 | 326 | set_app_settings(config) |
|
327 | 327 | h.flash(_('Updated visualisation settings'), |
|
328 | 328 | category='success') |
|
329 | 329 | |
|
330 | 330 | except Exception: |
|
331 | 331 | log.error(traceback.format_exc()) |
|
332 | 332 | h.flash(_('Error occurred during updating ' |
|
333 | 333 | 'visualisation settings'), |
|
334 | 334 | category='error') |
|
335 | 335 | |
|
336 | 336 | return redirect(url('admin_settings_visual')) |
|
337 | 337 | |
|
338 | 338 | defaults = Setting.get_app_settings() |
|
339 | 339 | defaults.update(self._get_hg_ui_settings()) |
|
340 | 340 | |
|
341 | 341 | return htmlfill.render( |
|
342 | 342 | render('admin/settings/settings.html'), |
|
343 | 343 | defaults=defaults, |
|
344 | 344 | encoding="UTF-8", |
|
345 | 345 | force_defaults=False) |
|
346 | 346 | |
|
347 | 347 | @HasPermissionAllDecorator('hg.admin') |
|
348 | 348 | def settings_email(self): |
|
349 | 349 | """GET /admin/settings/email: All items in the collection""" |
|
350 | 350 | # url('admin_settings_email') |
|
351 | 351 | c.active = 'email' |
|
352 | 352 | if request.POST: |
|
353 | 353 | test_email = request.POST.get('test_email') |
|
354 | 354 | test_email_subj = 'Kallithea test email' |
|
355 | 355 | test_body = ('Kallithea Email test, ' |
|
356 | 356 | 'Kallithea version: %s' % c.kallithea_version) |
|
357 | 357 | if not test_email: |
|
358 | 358 | h.flash(_('Please enter email address'), category='error') |
|
359 | 359 | return redirect(url('admin_settings_email')) |
|
360 | 360 | |
|
361 | 361 | test_email_txt_body = EmailNotificationModel()\ |
|
362 | 362 | .get_email_tmpl(EmailNotificationModel.TYPE_DEFAULT, |
|
363 | 363 | 'txt', body=test_body) |
|
364 | 364 | test_email_html_body = EmailNotificationModel()\ |
|
365 | 365 | .get_email_tmpl(EmailNotificationModel.TYPE_DEFAULT, |
|
366 | 366 | 'html', body=test_body) |
|
367 | 367 | |
|
368 | 368 | recipients = [test_email] if test_email else None |
|
369 | 369 | |
|
370 | 370 | run_task(tasks.send_email, recipients, test_email_subj, |
|
371 | 371 | test_email_txt_body, test_email_html_body) |
|
372 | 372 | |
|
373 | 373 | h.flash(_('Send email task created'), category='success') |
|
374 | 374 | return redirect(url('admin_settings_email')) |
|
375 | 375 | |
|
376 | 376 | defaults = Setting.get_app_settings() |
|
377 | 377 | defaults.update(self._get_hg_ui_settings()) |
|
378 | 378 | |
|
379 | 379 | import kallithea |
|
380 | 380 | c.ini = kallithea.CONFIG |
|
381 | 381 | |
|
382 | 382 | return htmlfill.render( |
|
383 | 383 | render('admin/settings/settings.html'), |
|
384 | 384 | defaults=defaults, |
|
385 | 385 | encoding="UTF-8", |
|
386 | 386 | force_defaults=False) |
|
387 | 387 | |
|
388 | 388 | @HasPermissionAllDecorator('hg.admin') |
|
389 | 389 | def settings_hooks(self): |
|
390 | 390 | """GET /admin/settings/hooks: All items in the collection""" |
|
391 | 391 | # url('admin_settings_hooks') |
|
392 | 392 | c.active = 'hooks' |
|
393 | 393 | if request.POST: |
|
394 | 394 | if c.visual.allow_custom_hooks_settings: |
|
395 | 395 | ui_key = request.POST.get('new_hook_ui_key') |
|
396 | 396 | ui_value = request.POST.get('new_hook_ui_value') |
|
397 | 397 | |
|
398 | 398 | hook_id = request.POST.get('hook_id') |
|
399 | 399 | |
|
400 | 400 | try: |
|
401 | 401 | ui_key = ui_key and ui_key.strip() |
|
402 | 402 | if ui_value and ui_key: |
|
403 | 403 | Ui.create_or_update_hook(ui_key, ui_value) |
|
404 | 404 | h.flash(_('Added new hook'), category='success') |
|
405 | 405 | elif hook_id: |
|
406 | 406 | Ui.delete(hook_id) |
|
407 | 407 | Session().commit() |
|
408 | 408 | |
|
409 | 409 | # check for edits |
|
410 | 410 | update = False |
|
411 | 411 | _d = request.POST.dict_of_lists() |
|
412 | 412 | for k, v in zip(_d.get('hook_ui_key', []), |
|
413 | 413 | _d.get('hook_ui_value_new', [])): |
|
414 | 414 | Ui.create_or_update_hook(k, v) |
|
415 | 415 | update = True |
|
416 | 416 | |
|
417 | 417 | if update: |
|
418 | 418 | h.flash(_('Updated hooks'), category='success') |
|
419 | 419 | Session().commit() |
|
420 | 420 | except Exception: |
|
421 | 421 | log.error(traceback.format_exc()) |
|
422 | 422 | h.flash(_('Error occurred during hook creation'), |
|
423 | 423 | category='error') |
|
424 | 424 | |
|
425 | 425 | return redirect(url('admin_settings_hooks')) |
|
426 | 426 | |
|
427 | 427 | defaults = Setting.get_app_settings() |
|
428 | 428 | defaults.update(self._get_hg_ui_settings()) |
|
429 | 429 | |
|
430 | 430 | c.hooks = Ui.get_builtin_hooks() |
|
431 | 431 | c.custom_hooks = Ui.get_custom_hooks() |
|
432 | 432 | |
|
433 | 433 | return htmlfill.render( |
|
434 | 434 | render('admin/settings/settings.html'), |
|
435 | 435 | defaults=defaults, |
|
436 | 436 | encoding="UTF-8", |
|
437 | 437 | force_defaults=False) |
|
438 | 438 | |
|
439 | 439 | @HasPermissionAllDecorator('hg.admin') |
|
440 | 440 | def settings_search(self): |
|
441 | 441 | """GET /admin/settings/search: All items in the collection""" |
|
442 | 442 | # url('admin_settings_search') |
|
443 | 443 | c.active = 'search' |
|
444 | 444 | if request.POST: |
|
445 | 445 | repo_location = self._get_hg_ui_settings()['paths_root_path'] |
|
446 | 446 | full_index = request.POST.get('full_index', False) |
|
447 | 447 | run_task(tasks.whoosh_index, repo_location, full_index) |
|
448 | 448 | h.flash(_('Whoosh reindex task scheduled'), category='success') |
|
449 | 449 | return redirect(url('admin_settings_search')) |
|
450 | 450 | |
|
451 | 451 | defaults = Setting.get_app_settings() |
|
452 | 452 | defaults.update(self._get_hg_ui_settings()) |
|
453 | 453 | |
|
454 | 454 | return htmlfill.render( |
|
455 | 455 | render('admin/settings/settings.html'), |
|
456 | 456 | defaults=defaults, |
|
457 | 457 | encoding="UTF-8", |
|
458 | 458 | force_defaults=False) |
|
459 | 459 | |
|
460 | 460 | @HasPermissionAllDecorator('hg.admin') |
|
461 | 461 | def settings_system(self): |
|
462 | 462 | """GET /admin/settings/system: All items in the collection""" |
|
463 | 463 | # url('admin_settings_system') |
|
464 | 464 | c.active = 'system' |
|
465 | 465 | |
|
466 | 466 | defaults = Setting.get_app_settings() |
|
467 | 467 | defaults.update(self._get_hg_ui_settings()) |
|
468 | 468 | |
|
469 | 469 | import kallithea |
|
470 | 470 | c.ini = kallithea.CONFIG |
|
471 | 471 | c.update_url = defaults.get('update_url') |
|
472 | 472 | server_info = Setting.get_server_info() |
|
473 | 473 | for key, val in server_info.iteritems(): |
|
474 | 474 | setattr(c, key, val) |
|
475 | 475 | |
|
476 | 476 | return htmlfill.render( |
|
477 | 477 | render('admin/settings/settings.html'), |
|
478 | 478 | defaults=defaults, |
|
479 | 479 | encoding="UTF-8", |
|
480 | 480 | force_defaults=False) |
|
481 | 481 | |
|
482 | 482 | @HasPermissionAllDecorator('hg.admin') |
|
483 | 483 | def settings_system_update(self): |
|
484 | 484 | """GET /admin/settings/system/updates: All items in the collection""" |
|
485 | 485 | # url('admin_settings_system_update') |
|
486 | 486 | import json |
|
487 | 487 | import urllib2 |
|
488 | 488 | from kallithea.lib.verlib import NormalizedVersion |
|
489 | 489 | from kallithea import __version__ |
|
490 | 490 | |
|
491 | 491 | defaults = Setting.get_app_settings() |
|
492 | 492 | defaults.update(self._get_hg_ui_settings()) |
|
493 | 493 | _update_url = defaults.get('update_url', '') |
|
494 | 494 | _update_url = "" # FIXME: disabled |
|
495 | 495 | |
|
496 | 496 | _err = lambda s: '<div style="color:#ff8888; padding:4px 0px">%s</div>' % (s) |
|
497 | 497 | try: |
|
498 | 498 | import kallithea |
|
499 | 499 | ver = kallithea.__version__ |
|
500 |
log.debug('Checking for upgrade on `%s` server' |
|
|
500 | log.debug('Checking for upgrade on `%s` server', _update_url) | |
|
501 | 501 | opener = urllib2.build_opener() |
|
502 | 502 | opener.addheaders = [('User-agent', 'Kallithea-SCM/%s' % ver)] |
|
503 | 503 | response = opener.open(_update_url) |
|
504 | 504 | response_data = response.read() |
|
505 | 505 | data = json.loads(response_data) |
|
506 | 506 | except urllib2.URLError as e: |
|
507 | 507 | log.error(traceback.format_exc()) |
|
508 | 508 | return _err('Failed to contact upgrade server: %r' % e) |
|
509 | 509 | except ValueError as e: |
|
510 | 510 | log.error(traceback.format_exc()) |
|
511 | 511 | return _err('Bad data sent from update server') |
|
512 | 512 | |
|
513 | 513 | latest = data['versions'][0] |
|
514 | 514 | |
|
515 | 515 | c.update_url = _update_url |
|
516 | 516 | c.latest_data = latest |
|
517 | 517 | c.latest_ver = latest['version'] |
|
518 | 518 | c.cur_ver = __version__ |
|
519 | 519 | c.should_upgrade = False |
|
520 | 520 | |
|
521 | 521 | if NormalizedVersion(c.latest_ver) > NormalizedVersion(c.cur_ver): |
|
522 | 522 | c.should_upgrade = True |
|
523 | 523 | c.important_notices = latest['general'] |
|
524 | 524 | |
|
525 | 525 | return render('admin/settings/settings_system_update.html'), |
@@ -1,302 +1,302 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.api |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | JSON RPC controller |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Aug 20, 2011 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import inspect |
|
29 | 29 | import logging |
|
30 | 30 | import types |
|
31 | 31 | import traceback |
|
32 | 32 | import time |
|
33 | 33 | |
|
34 | 34 | from paste.response import replace_header |
|
35 | 35 | from pylons.controllers import WSGIController |
|
36 | 36 | |
|
37 | 37 | from webob.exc import HTTPError |
|
38 | 38 | |
|
39 | 39 | from kallithea.model.db import User |
|
40 | 40 | from kallithea.model import meta |
|
41 | 41 | from kallithea.lib.compat import izip_longest, json |
|
42 | 42 | from kallithea.lib.auth import AuthUser |
|
43 | 43 | from kallithea.lib.base import _get_ip_addr as _get_ip, _get_access_path |
|
44 | 44 | from kallithea.lib.utils2 import safe_unicode, safe_str |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger('JSONRPC') |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class JSONRPCError(BaseException): |
|
50 | 50 | |
|
51 | 51 | def __init__(self, message): |
|
52 | 52 | self.message = message |
|
53 | 53 | super(JSONRPCError, self).__init__() |
|
54 | 54 | |
|
55 | 55 | def __str__(self): |
|
56 | 56 | return safe_str(self.message) |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | def jsonrpc_error(message, retid=None, code=None): |
|
60 | 60 | """ |
|
61 | 61 | Generate a Response object with a JSON-RPC error body |
|
62 | 62 | |
|
63 | 63 | :param code: |
|
64 | 64 | :param retid: |
|
65 | 65 | :param message: |
|
66 | 66 | """ |
|
67 | 67 | from pylons.controllers.util import Response |
|
68 | 68 | return Response( |
|
69 | 69 | body=json.dumps(dict(id=retid, result=None, error=message)), |
|
70 | 70 | status=code, |
|
71 | 71 | content_type='application/json' |
|
72 | 72 | ) |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | class JSONRPCController(WSGIController): |
|
76 | 76 | """ |
|
77 | 77 | A WSGI-speaking JSON-RPC controller class |
|
78 | 78 | |
|
79 | 79 | See the specification: |
|
80 | 80 | <http://json-rpc.org/wiki/specification>`. |
|
81 | 81 | |
|
82 | 82 | Valid controller return values should be json-serializable objects. |
|
83 | 83 | |
|
84 | 84 | Sub-classes should catch their exceptions and raise JSONRPCError |
|
85 | 85 | if they want to pass meaningful errors to the client. |
|
86 | 86 | |
|
87 | 87 | """ |
|
88 | 88 | |
|
89 | 89 | def _get_ip_addr(self, environ): |
|
90 | 90 | return _get_ip(environ) |
|
91 | 91 | |
|
92 | 92 | def _get_method_args(self): |
|
93 | 93 | """ |
|
94 | 94 | Return `self._rpc_args` to dispatched controller method |
|
95 | 95 | chosen by __call__ |
|
96 | 96 | """ |
|
97 | 97 | return self._rpc_args |
|
98 | 98 | |
|
99 | 99 | def __call__(self, environ, start_response): |
|
100 | 100 | """ |
|
101 | 101 | Parse the request body as JSON, look up the method on the |
|
102 | 102 | controller and if it exists, dispatch to it. |
|
103 | 103 | """ |
|
104 | 104 | try: |
|
105 | 105 | return self._handle_request(environ, start_response) |
|
106 | 106 | finally: |
|
107 | 107 | meta.Session.remove() |
|
108 | 108 | |
|
109 | 109 | def _handle_request(self, environ, start_response): |
|
110 | 110 | start = time.time() |
|
111 | 111 | ip_addr = self.ip_addr = self._get_ip_addr(environ) |
|
112 | 112 | self._req_id = None |
|
113 | 113 | if 'CONTENT_LENGTH' not in environ: |
|
114 | 114 | log.debug("No Content-Length") |
|
115 | 115 | return jsonrpc_error(retid=self._req_id, |
|
116 | 116 | message="No Content-Length in request") |
|
117 | 117 | else: |
|
118 | 118 | length = environ['CONTENT_LENGTH'] or 0 |
|
119 | 119 | length = int(environ['CONTENT_LENGTH']) |
|
120 |
log.debug('Content-Length: %s' |
|
|
120 | log.debug('Content-Length: %s', length) | |
|
121 | 121 | |
|
122 | 122 | if length == 0: |
|
123 | 123 | log.debug("Content-Length is 0") |
|
124 | 124 | return jsonrpc_error(retid=self._req_id, |
|
125 | 125 | message="Content-Length is 0") |
|
126 | 126 | |
|
127 | 127 | raw_body = environ['wsgi.input'].read(length) |
|
128 | 128 | |
|
129 | 129 | try: |
|
130 | 130 | json_body = json.loads(raw_body) |
|
131 | 131 | except ValueError as e: |
|
132 | 132 | # catch JSON errors Here |
|
133 | 133 | return jsonrpc_error(retid=self._req_id, |
|
134 | 134 | message="JSON parse error ERR:%s RAW:%r" |
|
135 | 135 | % (e, raw_body)) |
|
136 | 136 | |
|
137 | 137 | # check AUTH based on API key |
|
138 | 138 | try: |
|
139 | 139 | self._req_api_key = json_body['api_key'] |
|
140 | 140 | self._req_id = json_body['id'] |
|
141 | 141 | self._req_method = json_body['method'] |
|
142 | 142 | self._request_params = json_body['args'] |
|
143 | 143 | if not isinstance(self._request_params, dict): |
|
144 | 144 | self._request_params = {} |
|
145 | 145 | |
|
146 | 146 | log.debug( |
|
147 |
'method: %s, params: %s' |
|
|
148 |
self._request_params |
|
|
147 | 'method: %s, params: %s', self._req_method, | |
|
148 | self._request_params | |
|
149 | 149 | ) |
|
150 | 150 | except KeyError as e: |
|
151 | 151 | return jsonrpc_error(retid=self._req_id, |
|
152 | 152 | message='Incorrect JSON query missing %s' % e) |
|
153 | 153 | |
|
154 | 154 | # check if we can find this session using api_key |
|
155 | 155 | try: |
|
156 | 156 | u = User.get_by_api_key(self._req_api_key) |
|
157 | 157 | if u is None: |
|
158 | 158 | return jsonrpc_error(retid=self._req_id, |
|
159 | 159 | message='Invalid API key') |
|
160 | 160 | |
|
161 | 161 | auth_u = AuthUser(dbuser=u) |
|
162 | 162 | if not AuthUser.check_ip_allowed(auth_u, ip_addr): |
|
163 | 163 | return jsonrpc_error(retid=self._req_id, |
|
164 | 164 | message='request from IP:%s not allowed' % (ip_addr,)) |
|
165 | 165 | else: |
|
166 |
log.info('Access for IP:%s allowed' |
|
|
166 | log.info('Access for IP:%s allowed', ip_addr) | |
|
167 | 167 | |
|
168 | 168 | except Exception as e: |
|
169 | 169 | return jsonrpc_error(retid=self._req_id, |
|
170 | 170 | message='Invalid API key') |
|
171 | 171 | |
|
172 | 172 | self._error = None |
|
173 | 173 | try: |
|
174 | 174 | self._func = self._find_method() |
|
175 | 175 | except AttributeError as e: |
|
176 | 176 | return jsonrpc_error(retid=self._req_id, |
|
177 | 177 | message=str(e)) |
|
178 | 178 | |
|
179 | 179 | # now that we have a method, add self._req_params to |
|
180 | 180 | # self.kargs and dispatch control to WGIController |
|
181 | 181 | argspec = inspect.getargspec(self._func) |
|
182 | 182 | arglist = argspec[0][1:] |
|
183 | 183 | defaults = map(type, argspec[3] or []) |
|
184 | 184 | default_empty = types.NotImplementedType |
|
185 | 185 | |
|
186 | 186 | # kw arguments required by this method |
|
187 | 187 | func_kwargs = dict(izip_longest(reversed(arglist), reversed(defaults), |
|
188 | 188 | fillvalue=default_empty)) |
|
189 | 189 | |
|
190 | 190 | # this is little trick to inject logged in user for |
|
191 | 191 | # perms decorators to work they expect the controller class to have |
|
192 | 192 | # authuser attribute set |
|
193 | 193 | self.authuser = auth_u |
|
194 | 194 | |
|
195 | 195 | # This attribute will need to be first param of a method that uses |
|
196 | 196 | # api_key, which is translated to instance of user at that name |
|
197 | 197 | USER_SESSION_ATTR = 'apiuser' |
|
198 | 198 | |
|
199 | 199 | if USER_SESSION_ATTR not in arglist: |
|
200 | 200 | return jsonrpc_error( |
|
201 | 201 | retid=self._req_id, |
|
202 | 202 | message='This method [%s] does not support ' |
|
203 | 203 | 'authentication (missing %s param)' % ( |
|
204 | 204 | self._func.__name__, USER_SESSION_ATTR) |
|
205 | 205 | ) |
|
206 | 206 | |
|
207 | 207 | # get our arglist and check if we provided them as args |
|
208 | 208 | for arg, default in func_kwargs.iteritems(): |
|
209 | 209 | if arg == USER_SESSION_ATTR: |
|
210 | 210 | # USER_SESSION_ATTR is something translated from API key and |
|
211 | 211 | # this is checked before so we don't need validate it |
|
212 | 212 | continue |
|
213 | 213 | |
|
214 | 214 | # skip the required param check if it's default value is |
|
215 | 215 | # NotImplementedType (default_empty) |
|
216 | 216 | if default == default_empty and arg not in self._request_params: |
|
217 | 217 | return jsonrpc_error( |
|
218 | 218 | retid=self._req_id, |
|
219 | 219 | message=( |
|
220 | 220 | 'Missing non optional `%s` arg in JSON DATA' % arg |
|
221 | 221 | ) |
|
222 | 222 | ) |
|
223 | 223 | |
|
224 | 224 | self._rpc_args = {USER_SESSION_ATTR: u} |
|
225 | 225 | |
|
226 | 226 | self._rpc_args.update(self._request_params) |
|
227 | 227 | |
|
228 | 228 | self._rpc_args['action'] = self._req_method |
|
229 | 229 | self._rpc_args['environ'] = environ |
|
230 | 230 | self._rpc_args['start_response'] = start_response |
|
231 | 231 | |
|
232 | 232 | status = [] |
|
233 | 233 | headers = [] |
|
234 | 234 | exc_info = [] |
|
235 | 235 | |
|
236 | 236 | def change_content(new_status, new_headers, new_exc_info=None): |
|
237 | 237 | status.append(new_status) |
|
238 | 238 | headers.extend(new_headers) |
|
239 | 239 | exc_info.append(new_exc_info) |
|
240 | 240 | |
|
241 | 241 | output = WSGIController.__call__(self, environ, change_content) |
|
242 | 242 | output = list(output) |
|
243 | 243 | headers.append(('Content-Length', str(len(output[0])))) |
|
244 | 244 | replace_header(headers, 'Content-Type', 'application/json') |
|
245 | 245 | start_response(status[0], headers, exc_info[0]) |
|
246 | 246 | log.info('IP: %s Request to %s time: %.3fs' % ( |
|
247 | 247 | self._get_ip_addr(environ), |
|
248 | 248 | safe_unicode(_get_access_path(environ)), time.time() - start) |
|
249 | 249 | ) |
|
250 | 250 | return output |
|
251 | 251 | |
|
252 | 252 | def _dispatch_call(self): |
|
253 | 253 | """ |
|
254 | 254 | Implement dispatch interface specified by WSGIController |
|
255 | 255 | """ |
|
256 | 256 | raw_response = '' |
|
257 | 257 | try: |
|
258 | 258 | raw_response = self._inspect_call(self._func) |
|
259 | 259 | if isinstance(raw_response, HTTPError): |
|
260 | 260 | self._error = str(raw_response) |
|
261 | 261 | except JSONRPCError as e: |
|
262 | 262 | self._error = safe_str(e) |
|
263 | 263 | except Exception as e: |
|
264 | log.error('Encountered unhandled exception: %s' | |
|
265 |
|
|
|
264 | log.error('Encountered unhandled exception: %s', | |
|
265 | traceback.format_exc(),) | |
|
266 | 266 | json_exc = JSONRPCError('Internal server error') |
|
267 | 267 | self._error = safe_str(json_exc) |
|
268 | 268 | |
|
269 | 269 | if self._error is not None: |
|
270 | 270 | raw_response = None |
|
271 | 271 | |
|
272 | 272 | response = dict(id=self._req_id, result=raw_response, error=self._error) |
|
273 | 273 | try: |
|
274 | 274 | return json.dumps(response) |
|
275 | 275 | except TypeError as e: |
|
276 |
log.error('API FAILED. Error encoding response: %s' |
|
|
276 | log.error('API FAILED. Error encoding response: %s', e) | |
|
277 | 277 | return json.dumps( |
|
278 | 278 | dict( |
|
279 | 279 | id=self._req_id, |
|
280 | 280 | result=None, |
|
281 | 281 | error="Error encoding response" |
|
282 | 282 | ) |
|
283 | 283 | ) |
|
284 | 284 | |
|
285 | 285 | def _find_method(self): |
|
286 | 286 | """ |
|
287 | 287 | Return method named by `self._req_method` in controller if able |
|
288 | 288 | """ |
|
289 |
log.debug('Trying to find JSON-RPC method: %s' |
|
|
289 | log.debug('Trying to find JSON-RPC method: %s', self._req_method) | |
|
290 | 290 | if self._req_method.startswith('_'): |
|
291 | 291 | raise AttributeError("Method not allowed") |
|
292 | 292 | |
|
293 | 293 | try: |
|
294 | 294 | func = getattr(self, self._req_method, None) |
|
295 | 295 | except UnicodeEncodeError: |
|
296 | 296 | raise AttributeError("Problem decoding unicode in requested " |
|
297 | 297 | "method name.") |
|
298 | 298 | |
|
299 | 299 | if isinstance(func, types.MethodType): |
|
300 | 300 | return func |
|
301 | 301 | else: |
|
302 | 302 | raise AttributeError("No such method: %s" % (self._req_method,)) |
@@ -1,198 +1,198 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.changelog |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | changelog controller for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 21, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import logging |
|
29 | 29 | import traceback |
|
30 | 30 | |
|
31 | 31 | from pylons import request, url, session, tmpl_context as c |
|
32 | 32 | from pylons.controllers.util import redirect |
|
33 | 33 | from pylons.i18n.translation import _ |
|
34 | 34 | from webob.exc import HTTPNotFound, HTTPBadRequest |
|
35 | 35 | |
|
36 | 36 | import kallithea.lib.helpers as h |
|
37 | 37 | from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
38 | 38 | from kallithea.lib.base import BaseRepoController, render |
|
39 | 39 | from kallithea.lib.helpers import RepoPage |
|
40 | 40 | from kallithea.lib.compat import json |
|
41 | 41 | from kallithea.lib.graphmod import graph_data |
|
42 | 42 | from kallithea.lib.vcs.exceptions import RepositoryError, ChangesetDoesNotExistError,\ |
|
43 | 43 | ChangesetError, NodeDoesNotExistError, EmptyRepositoryError |
|
44 | 44 | from kallithea.lib.utils2 import safe_int, safe_str |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | log = logging.getLogger(__name__) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | def _load_changelog_summary(): |
|
51 | 51 | p = safe_int(request.GET.get('page'), 1) |
|
52 | 52 | size = safe_int(request.GET.get('size'), 10) |
|
53 | 53 | |
|
54 | 54 | def url_generator(**kw): |
|
55 | 55 | return url('changelog_summary_home', |
|
56 | 56 | repo_name=c.db_repo.repo_name, size=size, **kw) |
|
57 | 57 | |
|
58 | 58 | collection = c.db_repo_scm_instance |
|
59 | 59 | |
|
60 | 60 | c.repo_changesets = RepoPage(collection, page=p, |
|
61 | 61 | items_per_page=size, |
|
62 | 62 | url=url_generator) |
|
63 | 63 | page_revisions = [x.raw_id for x in list(c.repo_changesets)] |
|
64 | 64 | c.comments = c.db_repo.get_comments(page_revisions) |
|
65 | 65 | c.statuses = c.db_repo.statuses(page_revisions) |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | class ChangelogController(BaseRepoController): |
|
69 | 69 | |
|
70 | 70 | def __before__(self): |
|
71 | 71 | super(ChangelogController, self).__before__() |
|
72 | 72 | c.affected_files_cut_off = 60 |
|
73 | 73 | |
|
74 | 74 | @staticmethod |
|
75 | 75 | def __get_cs(rev, repo): |
|
76 | 76 | """ |
|
77 | 77 | Safe way to get changeset. If error occur fail with error message. |
|
78 | 78 | |
|
79 | 79 | :param rev: revision to fetch |
|
80 | 80 | :param repo: repo instance |
|
81 | 81 | """ |
|
82 | 82 | |
|
83 | 83 | try: |
|
84 | 84 | return c.db_repo_scm_instance.get_changeset(rev) |
|
85 | 85 | except EmptyRepositoryError as e: |
|
86 | 86 | h.flash(h.literal(_('There are no changesets yet')), |
|
87 | 87 | category='error') |
|
88 | 88 | except RepositoryError as e: |
|
89 | 89 | log.error(traceback.format_exc()) |
|
90 | 90 | h.flash(safe_str(e), category='error') |
|
91 | 91 | raise HTTPBadRequest() |
|
92 | 92 | |
|
93 | 93 | @LoginRequired() |
|
94 | 94 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
95 | 95 | 'repository.admin') |
|
96 | 96 | def index(self, repo_name, revision=None, f_path=None): |
|
97 | 97 | # Fix URL after page size form submission via GET |
|
98 | 98 | # TODO: Somehow just don't send this extra junk in the GET URL |
|
99 | 99 | if request.GET.get('set'): |
|
100 | 100 | request.GET.pop('set', None) |
|
101 | 101 | request.GET.pop('_authentication_token', None) |
|
102 | 102 | if revision is None: |
|
103 | 103 | return redirect(url('changelog_home', repo_name=repo_name, **request.GET)) |
|
104 | 104 | return redirect(url('changelog_file_home', repo_name=repo_name, revision=revision, f_path=f_path, **request.GET)) |
|
105 | 105 | |
|
106 | 106 | limit = 2000 |
|
107 | 107 | default = 100 |
|
108 | 108 | if request.GET.get('size'): |
|
109 | 109 | c.size = max(min(safe_int(request.GET.get('size')), limit), 1) |
|
110 | 110 | session['changelog_size'] = c.size |
|
111 | 111 | session.save() |
|
112 | 112 | else: |
|
113 | 113 | c.size = int(session.get('changelog_size', default)) |
|
114 | 114 | # min size must be 1 |
|
115 | 115 | c.size = max(c.size, 1) |
|
116 | 116 | p = safe_int(request.GET.get('page', 1), 1) |
|
117 | 117 | branch_name = request.GET.get('branch', None) |
|
118 | 118 | if (branch_name and |
|
119 | 119 | branch_name not in c.db_repo_scm_instance.branches and |
|
120 | 120 | branch_name not in c.db_repo_scm_instance.closed_branches and |
|
121 | 121 | not revision): |
|
122 | 122 | return redirect(url('changelog_file_home', repo_name=c.repo_name, |
|
123 | 123 | revision=branch_name, f_path=f_path or '')) |
|
124 | 124 | |
|
125 | 125 | if revision == 'tip': |
|
126 | 126 | revision = None |
|
127 | 127 | |
|
128 | 128 | c.changelog_for_path = f_path |
|
129 | 129 | try: |
|
130 | 130 | |
|
131 | 131 | if f_path: |
|
132 |
log.debug('generating changelog for path %s' |
|
|
132 | log.debug('generating changelog for path %s', f_path) | |
|
133 | 133 | # get the history for the file ! |
|
134 | 134 | tip_cs = c.db_repo_scm_instance.get_changeset() |
|
135 | 135 | try: |
|
136 | 136 | collection = tip_cs.get_file_history(f_path) |
|
137 | 137 | except (NodeDoesNotExistError, ChangesetError): |
|
138 | 138 | #this node is not present at tip ! |
|
139 | 139 | try: |
|
140 | 140 | cs = self.__get_cs(revision, repo_name) |
|
141 | 141 | collection = cs.get_file_history(f_path) |
|
142 | 142 | except RepositoryError as e: |
|
143 | 143 | h.flash(safe_str(e), category='warning') |
|
144 | 144 | redirect(h.url('changelog_home', repo_name=repo_name)) |
|
145 | 145 | collection = list(reversed(collection)) |
|
146 | 146 | else: |
|
147 | 147 | collection = c.db_repo_scm_instance.get_changesets(start=0, end=revision, |
|
148 | 148 | branch_name=branch_name) |
|
149 | 149 | c.total_cs = len(collection) |
|
150 | 150 | |
|
151 | 151 | c.pagination = RepoPage(collection, page=p, item_count=c.total_cs, |
|
152 | 152 | items_per_page=c.size, branch=branch_name,) |
|
153 | 153 | |
|
154 | 154 | page_revisions = [x.raw_id for x in c.pagination] |
|
155 | 155 | c.comments = c.db_repo.get_comments(page_revisions) |
|
156 | 156 | c.statuses = c.db_repo.statuses(page_revisions) |
|
157 | 157 | except EmptyRepositoryError as e: |
|
158 | 158 | h.flash(safe_str(e), category='warning') |
|
159 | 159 | return redirect(url('summary_home', repo_name=c.repo_name)) |
|
160 | 160 | except (RepositoryError, ChangesetDoesNotExistError, Exception) as e: |
|
161 | 161 | log.error(traceback.format_exc()) |
|
162 | 162 | h.flash(safe_str(e), category='error') |
|
163 | 163 | return redirect(url('changelog_home', repo_name=c.repo_name)) |
|
164 | 164 | |
|
165 | 165 | c.branch_name = branch_name |
|
166 | 166 | c.branch_filters = [('', _('None'))] + \ |
|
167 | 167 | [(k, k) for k in c.db_repo_scm_instance.branches.keys()] |
|
168 | 168 | if c.db_repo_scm_instance.closed_branches: |
|
169 | 169 | prefix = _('(closed)') + ' ' |
|
170 | 170 | c.branch_filters += [('-', '-')] + \ |
|
171 | 171 | [(k, prefix + k) for k in c.db_repo_scm_instance.closed_branches.keys()] |
|
172 | 172 | revs = [] |
|
173 | 173 | if not f_path: |
|
174 | 174 | revs = [x.revision for x in c.pagination] |
|
175 | 175 | c.jsdata = json.dumps(graph_data(c.db_repo_scm_instance, revs)) |
|
176 | 176 | |
|
177 | 177 | c.revision = revision # requested revision ref |
|
178 | 178 | c.first_revision = c.pagination[0] # pagination is never empty here! |
|
179 | 179 | return render('changelog/changelog.html') |
|
180 | 180 | |
|
181 | 181 | @LoginRequired() |
|
182 | 182 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
183 | 183 | 'repository.admin') |
|
184 | 184 | def changelog_details(self, cs): |
|
185 | 185 | if request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
186 | 186 | c.cs = c.db_repo_scm_instance.get_changeset(cs) |
|
187 | 187 | return render('changelog/changelog_details.html') |
|
188 | 188 | raise HTTPNotFound() |
|
189 | 189 | |
|
190 | 190 | @LoginRequired() |
|
191 | 191 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
192 | 192 | 'repository.admin') |
|
193 | 193 | def changelog_summary(self, repo_name): |
|
194 | 194 | if request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
195 | 195 | _load_changelog_summary() |
|
196 | 196 | |
|
197 | 197 | return render('changelog/changelog_summary_data.html') |
|
198 | 198 | raise HTTPNotFound() |
@@ -1,293 +1,293 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.compare |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | compare controller for pylons showing differences between two |
|
19 | 19 | repos, branches, bookmarks or tips |
|
20 | 20 | |
|
21 | 21 | This file was forked by the Kallithea project in July 2014. |
|
22 | 22 | Original author and date, and relevant copyright and licensing information is below: |
|
23 | 23 | :created_on: May 6, 2012 |
|
24 | 24 | :author: marcink |
|
25 | 25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
26 | 26 | :license: GPLv3, see LICENSE.md for more details. |
|
27 | 27 | """ |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | import logging |
|
31 | 31 | import re |
|
32 | 32 | |
|
33 | 33 | from webob.exc import HTTPBadRequest |
|
34 | 34 | from pylons import request, tmpl_context as c, url |
|
35 | 35 | from pylons.controllers.util import redirect |
|
36 | 36 | from pylons.i18n.translation import _ |
|
37 | 37 | |
|
38 | 38 | from kallithea.lib.vcs.utils.hgcompat import unionrepo |
|
39 | 39 | from kallithea.lib import helpers as h |
|
40 | 40 | from kallithea.lib.base import BaseRepoController, render |
|
41 | 41 | from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
42 | 42 | from kallithea.lib import diffs |
|
43 | 43 | from kallithea.model.db import Repository |
|
44 | 44 | from kallithea.lib.diffs import LimitedDiffContainer |
|
45 | 45 | from kallithea.controllers.changeset import _ignorews_url,\ |
|
46 | 46 | _context_url, get_line_ctx, get_ignore_ws |
|
47 | 47 | from kallithea.lib.graphmod import graph_data |
|
48 | 48 | from kallithea.lib.compat import json |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class CompareController(BaseRepoController): |
|
54 | 54 | |
|
55 | 55 | def __before__(self): |
|
56 | 56 | super(CompareController, self).__before__() |
|
57 | 57 | |
|
58 | 58 | @staticmethod |
|
59 | 59 | def _get_changesets(alias, org_repo, org_rev, other_repo, other_rev): |
|
60 | 60 | """ |
|
61 | 61 | Returns lists of changesets that can be merged from org_repo@org_rev |
|
62 | 62 | to other_repo@other_rev |
|
63 | 63 | ... and the other way |
|
64 | 64 | ... and the ancestor that would be used for merge |
|
65 | 65 | |
|
66 | 66 | :param org_repo: repo object, that is most likely the original repo we forked from |
|
67 | 67 | :param org_rev: the revision we want our compare to be made |
|
68 | 68 | :param other_repo: repo object, most likely the fork of org_repo. It has |
|
69 | 69 | all changesets that we need to obtain |
|
70 | 70 | :param other_rev: revision we want out compare to be made on other_repo |
|
71 | 71 | """ |
|
72 | 72 | ancestor = None |
|
73 | 73 | if org_rev == other_rev: |
|
74 | 74 | org_changesets = [] |
|
75 | 75 | other_changesets = [] |
|
76 | 76 | ancestor = org_rev |
|
77 | 77 | |
|
78 | 78 | elif alias == 'hg': |
|
79 | 79 | #case two independent repos |
|
80 | 80 | if org_repo != other_repo: |
|
81 | 81 | hgrepo = unionrepo.unionrepository(other_repo.baseui, |
|
82 | 82 | other_repo.path, |
|
83 | 83 | org_repo.path) |
|
84 | 84 | # all ancestors of other_rev will be in other_repo and |
|
85 | 85 | # rev numbers from hgrepo can be used in other_repo - org_rev ancestors cannot |
|
86 | 86 | |
|
87 | 87 | #no remote compare do it on the same repository |
|
88 | 88 | else: |
|
89 | 89 | hgrepo = other_repo._repo |
|
90 | 90 | |
|
91 | 91 | if org_repo.EMPTY_CHANGESET in (org_rev, other_rev): |
|
92 | 92 | # work around unexpected behaviour in Mercurial < 3.4 |
|
93 | 93 | ancestor = org_repo.EMPTY_CHANGESET |
|
94 | 94 | else: |
|
95 | 95 | ancestors = hgrepo.revs("ancestor(id(%s), id(%s))", org_rev, other_rev) |
|
96 | 96 | if ancestors: |
|
97 | 97 | # FIXME: picks arbitrary ancestor - but there is usually only one |
|
98 | 98 | try: |
|
99 | 99 | ancestor = hgrepo[ancestors.first()].hex() |
|
100 | 100 | except AttributeError: |
|
101 | 101 | # removed in hg 3.2 |
|
102 | 102 | ancestor = hgrepo[ancestors[0]].hex() |
|
103 | 103 | |
|
104 | 104 | other_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", |
|
105 | 105 | other_rev, org_rev, org_rev) |
|
106 | 106 | other_changesets = [other_repo.get_changeset(rev) for rev in other_revs] |
|
107 | 107 | org_revs = hgrepo.revs("ancestors(id(%s)) and not ancestors(id(%s)) and not id(%s)", |
|
108 | 108 | org_rev, other_rev, other_rev) |
|
109 | 109 | |
|
110 | 110 | org_changesets = [org_repo.get_changeset(hgrepo[rev].hex()) for rev in org_revs] |
|
111 | 111 | |
|
112 | 112 | elif alias == 'git': |
|
113 | 113 | if org_repo != other_repo: |
|
114 | 114 | from dulwich.repo import Repo |
|
115 | 115 | from dulwich.client import SubprocessGitClient |
|
116 | 116 | |
|
117 | 117 | gitrepo = Repo(org_repo.path) |
|
118 | 118 | SubprocessGitClient(thin_packs=False).fetch(other_repo.path, gitrepo) |
|
119 | 119 | |
|
120 | 120 | gitrepo_remote = Repo(other_repo.path) |
|
121 | 121 | SubprocessGitClient(thin_packs=False).fetch(org_repo.path, gitrepo_remote) |
|
122 | 122 | |
|
123 | 123 | revs = [] |
|
124 | 124 | for x in gitrepo_remote.get_walker(include=[other_rev], |
|
125 | 125 | exclude=[org_rev]): |
|
126 | 126 | revs.append(x.commit.id) |
|
127 | 127 | |
|
128 | 128 | other_changesets = [other_repo.get_changeset(rev) for rev in reversed(revs)] |
|
129 | 129 | if other_changesets: |
|
130 | 130 | ancestor = other_changesets[0].parents[0].raw_id |
|
131 | 131 | else: |
|
132 | 132 | # no changesets from other repo, ancestor is the other_rev |
|
133 | 133 | ancestor = other_rev |
|
134 | 134 | |
|
135 | 135 | else: |
|
136 | 136 | so, se = org_repo.run_git_command( |
|
137 | 137 | ['log', '--reverse', '--pretty=format:%H', |
|
138 | 138 | '-s', '%s..%s' % (org_rev, other_rev)] |
|
139 | 139 | ) |
|
140 | 140 | other_changesets = [org_repo.get_changeset(cs) |
|
141 | 141 | for cs in re.findall(r'[0-9a-fA-F]{40}', so)] |
|
142 | 142 | so, se = org_repo.run_git_command( |
|
143 | 143 | ['merge-base', org_rev, other_rev] |
|
144 | 144 | ) |
|
145 | 145 | ancestor = re.findall(r'[0-9a-fA-F]{40}', so)[0] |
|
146 | 146 | org_changesets = [] |
|
147 | 147 | |
|
148 | 148 | else: |
|
149 | 149 | raise Exception('Bad alias only git and hg is allowed') |
|
150 | 150 | |
|
151 | 151 | return other_changesets, org_changesets, ancestor |
|
152 | 152 | |
|
153 | 153 | @LoginRequired() |
|
154 | 154 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
155 | 155 | 'repository.admin') |
|
156 | 156 | def index(self, repo_name): |
|
157 | 157 | c.compare_home = True |
|
158 | 158 | org_repo = c.db_repo.repo_name |
|
159 | 159 | other_repo = request.GET.get('other_repo', org_repo) |
|
160 | 160 | c.a_repo = Repository.get_by_repo_name(org_repo) |
|
161 | 161 | c.cs_repo = Repository.get_by_repo_name(other_repo) |
|
162 | 162 | c.a_ref_name = c.cs_ref_name = _('Select changeset') |
|
163 | 163 | return render('compare/compare_diff.html') |
|
164 | 164 | |
|
165 | 165 | @LoginRequired() |
|
166 | 166 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
167 | 167 | 'repository.admin') |
|
168 | 168 | def compare(self, repo_name, org_ref_type, org_ref_name, other_ref_type, other_ref_name): |
|
169 | 169 | org_repo = c.db_repo.repo_name |
|
170 | 170 | other_repo = request.GET.get('other_repo', org_repo) |
|
171 | 171 | # If merge is True: |
|
172 | 172 | # Show what org would get if merged with other: |
|
173 | 173 | # List changesets that are ancestors of other but not of org. |
|
174 | 174 | # New changesets in org is thus ignored. |
|
175 | 175 | # Diff will be from common ancestor, and merges of org to other will thus be ignored. |
|
176 | 176 | # If merge is False: |
|
177 | 177 | # Make a raw diff from org to other, no matter if related or not. |
|
178 | 178 | # Changesets in one and not in the other will be ignored |
|
179 | 179 | merge = bool(request.GET.get('merge')) |
|
180 | 180 | # fulldiff disables cut_off_limit |
|
181 | 181 | c.fulldiff = request.GET.get('fulldiff') |
|
182 | 182 | # partial uses compare_cs.html template directly |
|
183 | 183 | partial = request.environ.get('HTTP_X_PARTIAL_XHR') |
|
184 | 184 | # as_form puts hidden input field with changeset revisions |
|
185 | 185 | c.as_form = partial and request.GET.get('as_form') |
|
186 | 186 | # swap url for compare_diff page - never partial and never as_form |
|
187 | 187 | c.swap_url = h.url('compare_url', |
|
188 | 188 | repo_name=other_repo, |
|
189 | 189 | org_ref_type=other_ref_type, org_ref_name=other_ref_name, |
|
190 | 190 | other_repo=org_repo, |
|
191 | 191 | other_ref_type=org_ref_type, other_ref_name=org_ref_name, |
|
192 | 192 | merge=merge or '') |
|
193 | 193 | |
|
194 | 194 | # set callbacks for generating markup for icons |
|
195 | 195 | c.ignorews_url = _ignorews_url |
|
196 | 196 | c.context_url = _context_url |
|
197 | 197 | ignore_whitespace = request.GET.get('ignorews') == '1' |
|
198 | 198 | line_context = request.GET.get('context', 3) |
|
199 | 199 | |
|
200 | 200 | org_repo = Repository.get_by_repo_name(org_repo) |
|
201 | 201 | other_repo = Repository.get_by_repo_name(other_repo) |
|
202 | 202 | |
|
203 | 203 | if org_repo is None: |
|
204 | 204 | msg = 'Could not find org repo %s' % org_repo |
|
205 | 205 | log.error(msg) |
|
206 | 206 | h.flash(msg, category='error') |
|
207 | 207 | return redirect(url('compare_home', repo_name=c.repo_name)) |
|
208 | 208 | |
|
209 | 209 | if other_repo is None: |
|
210 | 210 | msg = 'Could not find other repo %s' % other_repo |
|
211 | 211 | log.error(msg) |
|
212 | 212 | h.flash(msg, category='error') |
|
213 | 213 | return redirect(url('compare_home', repo_name=c.repo_name)) |
|
214 | 214 | |
|
215 | 215 | if org_repo.scm_instance.alias != other_repo.scm_instance.alias: |
|
216 | 216 | msg = 'compare of two different kind of remote repos not available' |
|
217 | 217 | log.error(msg) |
|
218 | 218 | h.flash(msg, category='error') |
|
219 | 219 | return redirect(url('compare_home', repo_name=c.repo_name)) |
|
220 | 220 | |
|
221 | 221 | c.a_rev = self._get_ref_rev(org_repo, org_ref_type, org_ref_name, |
|
222 | 222 | returnempty=True) |
|
223 | 223 | c.cs_rev = self._get_ref_rev(other_repo, other_ref_type, other_ref_name) |
|
224 | 224 | |
|
225 | 225 | c.compare_home = False |
|
226 | 226 | c.a_repo = org_repo |
|
227 | 227 | c.a_ref_name = org_ref_name |
|
228 | 228 | c.a_ref_type = org_ref_type |
|
229 | 229 | c.cs_repo = other_repo |
|
230 | 230 | c.cs_ref_name = other_ref_name |
|
231 | 231 | c.cs_ref_type = other_ref_type |
|
232 | 232 | |
|
233 | 233 | c.cs_ranges, c.cs_ranges_org, c.ancestor = self._get_changesets( |
|
234 | 234 | org_repo.scm_instance.alias, org_repo.scm_instance, c.a_rev, |
|
235 | 235 | other_repo.scm_instance, c.cs_rev) |
|
236 | 236 | raw_ids = [x.raw_id for x in c.cs_ranges] |
|
237 | 237 | c.cs_comments = other_repo.get_comments(raw_ids) |
|
238 | 238 | c.statuses = other_repo.statuses(raw_ids) |
|
239 | 239 | |
|
240 | 240 | revs = [ctx.revision for ctx in reversed(c.cs_ranges)] |
|
241 | 241 | c.jsdata = json.dumps(graph_data(c.cs_repo.scm_instance, revs)) |
|
242 | 242 | |
|
243 | 243 | if partial: |
|
244 | 244 | return render('compare/compare_cs.html') |
|
245 | 245 | if merge and c.ancestor: |
|
246 | 246 | # case we want a simple diff without incoming changesets, |
|
247 | 247 | # previewing what will be merged. |
|
248 | 248 | # Make the diff on the other repo (which is known to have other_rev) |
|
249 | log.debug('Using ancestor %s as rev1 instead of %s' | |
|
250 |
|
|
|
249 | log.debug('Using ancestor %s as rev1 instead of %s', | |
|
250 | c.ancestor, c.a_rev) | |
|
251 | 251 | rev1 = c.ancestor |
|
252 | 252 | org_repo = other_repo |
|
253 | 253 | else: # comparing tips, not necessarily linearly related |
|
254 | 254 | if merge: |
|
255 | 255 | log.error('Unable to find ancestor revision') |
|
256 | 256 | if org_repo != other_repo: |
|
257 | 257 | # TODO: we could do this by using hg unionrepo |
|
258 | 258 | log.error('cannot compare across repos %s and %s', org_repo, other_repo) |
|
259 | 259 | h.flash(_('Cannot compare repositories without using common ancestor'), category='error') |
|
260 | 260 | raise HTTPBadRequest |
|
261 | 261 | rev1 = c.a_rev |
|
262 | 262 | |
|
263 | 263 | diff_limit = self.cut_off_limit if not c.fulldiff else None |
|
264 | 264 | |
|
265 | log.debug('running diff between %s and %s in %s' | |
|
266 |
|
|
|
265 | log.debug('running diff between %s and %s in %s', | |
|
266 | rev1, c.cs_rev, org_repo.scm_instance.path) | |
|
267 | 267 | txtdiff = org_repo.scm_instance.get_diff(rev1=rev1, rev2=c.cs_rev, |
|
268 | 268 | ignore_whitespace=ignore_whitespace, |
|
269 | 269 | context=line_context) |
|
270 | 270 | |
|
271 | 271 | diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff', |
|
272 | 272 | diff_limit=diff_limit) |
|
273 | 273 | _parsed = diff_processor.prepare() |
|
274 | 274 | |
|
275 | 275 | c.limited_diff = False |
|
276 | 276 | if isinstance(_parsed, LimitedDiffContainer): |
|
277 | 277 | c.limited_diff = True |
|
278 | 278 | |
|
279 | 279 | c.files = [] |
|
280 | 280 | c.changes = {} |
|
281 | 281 | c.lines_added = 0 |
|
282 | 282 | c.lines_deleted = 0 |
|
283 | 283 | for f in _parsed: |
|
284 | 284 | st = f['stats'] |
|
285 | 285 | if not st['binary']: |
|
286 | 286 | c.lines_added += st['added'] |
|
287 | 287 | c.lines_deleted += st['deleted'] |
|
288 | 288 | fid = h.FID('', f['filename']) |
|
289 | 289 | c.files.append([fid, f['operation'], f['filename'], f['stats']]) |
|
290 | 290 | htmldiff = diff_processor.as_html(enable_comments=False, parsed_lines=[f]) |
|
291 | 291 | c.changes[fid] = [f['operation'], f['filename'], htmldiff] |
|
292 | 292 | |
|
293 | 293 | return render('compare/compare_diff.html') |
@@ -1,110 +1,110 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.error |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Kallithea error controller |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Dec 8, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import os |
|
29 | 29 | import cgi |
|
30 | 30 | import logging |
|
31 | 31 | import paste.fileapp |
|
32 | 32 | |
|
33 | 33 | from pylons import tmpl_context as c, request, config |
|
34 | 34 | from pylons.i18n.translation import _ |
|
35 | 35 | from pylons.middleware import media_path |
|
36 | 36 | |
|
37 | 37 | from kallithea.lib.base import BaseController, render |
|
38 | 38 | |
|
39 | 39 | log = logging.getLogger(__name__) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | class ErrorController(BaseController): |
|
43 | 43 | """Generates error documents as and when they are required. |
|
44 | 44 | |
|
45 | 45 | The ErrorDocuments middleware forwards to ErrorController when error |
|
46 | 46 | related status codes are returned from the application. |
|
47 | 47 | |
|
48 | 48 | This behavior can be altered by changing the parameters to the |
|
49 | 49 | ErrorDocuments middleware in your config/middleware.py file. |
|
50 | 50 | """ |
|
51 | 51 | |
|
52 | 52 | def __before__(self): |
|
53 | 53 | # disable all base actions since we don't need them here |
|
54 | 54 | pass |
|
55 | 55 | |
|
56 | 56 | def document(self): |
|
57 | 57 | resp = request.environ.get('pylons.original_response') |
|
58 | 58 | c.site_name = config.get('title') |
|
59 | 59 | |
|
60 |
log.debug('### %s ###' |
|
|
60 | log.debug('### %s ###', resp and resp.status or 'no response') | |
|
61 | 61 | |
|
62 | 62 | e = request.environ |
|
63 | 63 | c.serv_p = r'%(protocol)s://%(host)s/' % { |
|
64 | 64 | 'protocol': e.get('wsgi.url_scheme'), |
|
65 | 65 | 'host': e.get('HTTP_HOST'), } |
|
66 | 66 | if resp: |
|
67 | 67 | c.error_message = cgi.escape(request.GET.get('code', |
|
68 | 68 | str(resp.status))) |
|
69 | 69 | c.error_explanation = self.get_error_explanation(resp.status_int) |
|
70 | 70 | else: |
|
71 | 71 | c.error_message = _('No response') |
|
72 | 72 | c.error_explanation = _('Unknown error') |
|
73 | 73 | |
|
74 | 74 | return render('/errors/error_document.html') |
|
75 | 75 | |
|
76 | 76 | def img(self, id): |
|
77 | 77 | """Serve Pylons' stock images""" |
|
78 | 78 | return self._serve_file(os.path.join(media_path, 'img', id)) |
|
79 | 79 | |
|
80 | 80 | def style(self, id): |
|
81 | 81 | """Serve Pylons' stock stylesheets""" |
|
82 | 82 | return self._serve_file(os.path.join(media_path, 'style', id)) |
|
83 | 83 | |
|
84 | 84 | def _serve_file(self, path): |
|
85 | 85 | """Call Paste's FileApp (a WSGI application) to serve the file |
|
86 | 86 | at the specified path |
|
87 | 87 | """ |
|
88 | 88 | fapp = paste.fileapp.FileApp(path) |
|
89 | 89 | return fapp(request.environ, self.start_response) |
|
90 | 90 | |
|
91 | 91 | def get_error_explanation(self, code): |
|
92 | 92 | """ get the error explanations of int codes |
|
93 | 93 | [400, 401, 403, 404, 500]""" |
|
94 | 94 | try: |
|
95 | 95 | code = int(code) |
|
96 | 96 | except ValueError: |
|
97 | 97 | code = 500 |
|
98 | 98 | |
|
99 | 99 | if code == 400: |
|
100 | 100 | return _('The request could not be understood by the server' |
|
101 | 101 | ' due to malformed syntax.') |
|
102 | 102 | if code == 401: |
|
103 | 103 | return _('Unauthorized access to resource') |
|
104 | 104 | if code == 403: |
|
105 | 105 | return _("You don't have permission to view this page") |
|
106 | 106 | if code == 404: |
|
107 | 107 | return _('The resource could not be found') |
|
108 | 108 | if code == 500: |
|
109 | 109 | return _('The server encountered an unexpected condition' |
|
110 | 110 | ' which prevented it from fulfilling the request.') |
@@ -1,801 +1,801 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.files |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Files controller for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 21, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | from __future__ import with_statement |
|
29 | 29 | import os |
|
30 | 30 | import logging |
|
31 | 31 | import traceback |
|
32 | 32 | import tempfile |
|
33 | 33 | import shutil |
|
34 | 34 | |
|
35 | 35 | from pylons import request, response, tmpl_context as c, url |
|
36 | 36 | from pylons.i18n.translation import _ |
|
37 | 37 | from pylons.controllers.util import redirect |
|
38 | 38 | from kallithea.lib.utils import jsonify, action_logger |
|
39 | 39 | |
|
40 | 40 | from kallithea.lib import diffs |
|
41 | 41 | from kallithea.lib import helpers as h |
|
42 | 42 | |
|
43 | 43 | from kallithea.lib.compat import OrderedDict |
|
44 | 44 | from kallithea.lib.utils2 import convert_line_endings, detect_mode, safe_str,\ |
|
45 | 45 | str2bool |
|
46 | 46 | from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator |
|
47 | 47 | from kallithea.lib.base import BaseRepoController, render |
|
48 | 48 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
49 | 49 | from kallithea.lib.vcs.conf import settings |
|
50 | 50 | from kallithea.lib.vcs.exceptions import RepositoryError, \ |
|
51 | 51 | ChangesetDoesNotExistError, EmptyRepositoryError, \ |
|
52 | 52 | ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,\ |
|
53 | 53 | NodeDoesNotExistError, ChangesetError, NodeError |
|
54 | 54 | from kallithea.lib.vcs.nodes import FileNode |
|
55 | 55 | |
|
56 | 56 | from kallithea.model.repo import RepoModel |
|
57 | 57 | from kallithea.model.scm import ScmModel |
|
58 | 58 | from kallithea.model.db import Repository |
|
59 | 59 | |
|
60 | 60 | from kallithea.controllers.changeset import anchor_url, _ignorews_url,\ |
|
61 | 61 | _context_url, get_line_ctx, get_ignore_ws |
|
62 | 62 | from webob.exc import HTTPNotFound |
|
63 | 63 | from kallithea.lib.exceptions import NonRelativePathError |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | log = logging.getLogger(__name__) |
|
67 | 67 | |
|
68 | 68 | |
|
69 | 69 | class FilesController(BaseRepoController): |
|
70 | 70 | |
|
71 | 71 | def __before__(self): |
|
72 | 72 | super(FilesController, self).__before__() |
|
73 | 73 | c.cut_off_limit = self.cut_off_limit |
|
74 | 74 | |
|
75 | 75 | def __get_cs(self, rev, silent_empty=False): |
|
76 | 76 | """ |
|
77 | 77 | Safe way to get changeset if error occur it redirects to tip with |
|
78 | 78 | proper message |
|
79 | 79 | |
|
80 | 80 | :param rev: revision to fetch |
|
81 | 81 | :silent_empty: return None if repository is empty |
|
82 | 82 | """ |
|
83 | 83 | |
|
84 | 84 | try: |
|
85 | 85 | return c.db_repo_scm_instance.get_changeset(rev) |
|
86 | 86 | except EmptyRepositoryError as e: |
|
87 | 87 | if silent_empty: |
|
88 | 88 | return None |
|
89 | 89 | url_ = url('files_add_home', |
|
90 | 90 | repo_name=c.repo_name, |
|
91 | 91 | revision=0, f_path='', anchor='edit') |
|
92 | 92 | add_new = h.link_to(_('Click here to add new file'), url_, class_="alert-link") |
|
93 | 93 | h.flash(h.literal(_('There are no files yet. %s') % add_new), |
|
94 | 94 | category='warning') |
|
95 | 95 | raise HTTPNotFound() |
|
96 | 96 | except(ChangesetDoesNotExistError, LookupError), e: |
|
97 | 97 | msg = _('Such revision does not exist for this repository') |
|
98 | 98 | h.flash(msg, category='error') |
|
99 | 99 | raise HTTPNotFound() |
|
100 | 100 | except RepositoryError as e: |
|
101 | 101 | h.flash(safe_str(e), category='error') |
|
102 | 102 | raise HTTPNotFound() |
|
103 | 103 | |
|
104 | 104 | def __get_filenode(self, cs, path): |
|
105 | 105 | """ |
|
106 | 106 | Returns file_node or raise HTTP error. |
|
107 | 107 | |
|
108 | 108 | :param cs: given changeset |
|
109 | 109 | :param path: path to lookup |
|
110 | 110 | """ |
|
111 | 111 | |
|
112 | 112 | try: |
|
113 | 113 | file_node = cs.get_node(path) |
|
114 | 114 | if file_node.is_dir(): |
|
115 | 115 | raise RepositoryError('given path is a directory') |
|
116 | 116 | except(ChangesetDoesNotExistError,), e: |
|
117 | 117 | msg = _('Such revision does not exist for this repository') |
|
118 | 118 | h.flash(msg, category='error') |
|
119 | 119 | raise HTTPNotFound() |
|
120 | 120 | except RepositoryError as e: |
|
121 | 121 | h.flash(safe_str(e), category='error') |
|
122 | 122 | raise HTTPNotFound() |
|
123 | 123 | |
|
124 | 124 | return file_node |
|
125 | 125 | |
|
126 | 126 | @LoginRequired() |
|
127 | 127 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
128 | 128 | 'repository.admin') |
|
129 | 129 | def index(self, repo_name, revision, f_path, annotate=False): |
|
130 | 130 | # redirect to given revision from form if given |
|
131 | 131 | post_revision = request.POST.get('at_rev', None) |
|
132 | 132 | if post_revision: |
|
133 | 133 | cs = self.__get_cs(post_revision) # FIXME - unused! |
|
134 | 134 | |
|
135 | 135 | c.revision = revision |
|
136 | 136 | c.changeset = self.__get_cs(revision) |
|
137 | 137 | c.branch = request.GET.get('branch', None) |
|
138 | 138 | c.f_path = f_path |
|
139 | 139 | c.annotate = annotate |
|
140 | 140 | cur_rev = c.changeset.revision |
|
141 | 141 | |
|
142 | 142 | # prev link |
|
143 | 143 | try: |
|
144 | 144 | prev_rev = c.db_repo_scm_instance.get_changeset(cur_rev).prev(c.branch) |
|
145 | 145 | c.url_prev = url('files_home', repo_name=c.repo_name, |
|
146 | 146 | revision=prev_rev.raw_id, f_path=f_path) |
|
147 | 147 | if c.branch: |
|
148 | 148 | c.url_prev += '?branch=%s' % c.branch |
|
149 | 149 | except (ChangesetDoesNotExistError, VCSError): |
|
150 | 150 | c.url_prev = '#' |
|
151 | 151 | |
|
152 | 152 | # next link |
|
153 | 153 | try: |
|
154 | 154 | next_rev = c.db_repo_scm_instance.get_changeset(cur_rev).next(c.branch) |
|
155 | 155 | c.url_next = url('files_home', repo_name=c.repo_name, |
|
156 | 156 | revision=next_rev.raw_id, f_path=f_path) |
|
157 | 157 | if c.branch: |
|
158 | 158 | c.url_next += '?branch=%s' % c.branch |
|
159 | 159 | except (ChangesetDoesNotExistError, VCSError): |
|
160 | 160 | c.url_next = '#' |
|
161 | 161 | |
|
162 | 162 | # files or dirs |
|
163 | 163 | try: |
|
164 | 164 | c.file = c.changeset.get_node(f_path) |
|
165 | 165 | |
|
166 | 166 | if c.file.is_file(): |
|
167 | 167 | c.load_full_history = False |
|
168 | 168 | file_last_cs = c.file.last_changeset |
|
169 | 169 | c.file_changeset = (c.changeset |
|
170 | 170 | if c.changeset.revision < file_last_cs.revision |
|
171 | 171 | else file_last_cs) |
|
172 | 172 | #determine if we're on branch head |
|
173 | 173 | _branches = c.db_repo_scm_instance.branches |
|
174 | 174 | c.on_branch_head = revision in _branches.keys() + _branches.values() |
|
175 | 175 | _hist = [] |
|
176 | 176 | c.file_history = [] |
|
177 | 177 | if c.load_full_history: |
|
178 | 178 | c.file_history, _hist = self._get_node_history(c.changeset, f_path) |
|
179 | 179 | |
|
180 | 180 | c.authors = [] |
|
181 | 181 | for a in set([x.author for x in _hist]): |
|
182 | 182 | c.authors.append((h.email(a), h.person(a))) |
|
183 | 183 | else: |
|
184 | 184 | c.authors = c.file_history = [] |
|
185 | 185 | except RepositoryError as e: |
|
186 | 186 | h.flash(safe_str(e), category='error') |
|
187 | 187 | raise HTTPNotFound() |
|
188 | 188 | |
|
189 | 189 | if request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
190 | 190 | return render('files/files_ypjax.html') |
|
191 | 191 | |
|
192 | 192 | # TODO: tags and bookmarks? |
|
193 | 193 | c.revision_options = [(c.changeset.raw_id, |
|
194 | 194 | _('%s at %s') % (c.changeset.branch, h.short_id(c.changeset.raw_id)))] + \ |
|
195 | 195 | [(n, b) for b, n in c.db_repo_scm_instance.branches.items()] |
|
196 | 196 | if c.db_repo_scm_instance.closed_branches: |
|
197 | 197 | prefix = _('(closed)') + ' ' |
|
198 | 198 | c.revision_options += [('-', '-')] + \ |
|
199 | 199 | [(n, prefix + b) for b, n in c.db_repo_scm_instance.closed_branches.items()] |
|
200 | 200 | |
|
201 | 201 | return render('files/files.html') |
|
202 | 202 | |
|
203 | 203 | @LoginRequired() |
|
204 | 204 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
205 | 205 | 'repository.admin') |
|
206 | 206 | @jsonify |
|
207 | 207 | def history(self, repo_name, revision, f_path): |
|
208 | 208 | changeset = self.__get_cs(revision) |
|
209 | 209 | f_path = f_path |
|
210 | 210 | _file = changeset.get_node(f_path) |
|
211 | 211 | if _file.is_file(): |
|
212 | 212 | file_history, _hist = self._get_node_history(changeset, f_path) |
|
213 | 213 | |
|
214 | 214 | res = [] |
|
215 | 215 | for obj in file_history: |
|
216 | 216 | res.append({ |
|
217 | 217 | 'text': obj[1], |
|
218 | 218 | 'children': [{'id': o[0], 'text': o[1]} for o in obj[0]] |
|
219 | 219 | }) |
|
220 | 220 | |
|
221 | 221 | data = { |
|
222 | 222 | 'more': False, |
|
223 | 223 | 'results': res |
|
224 | 224 | } |
|
225 | 225 | return data |
|
226 | 226 | |
|
227 | 227 | @LoginRequired() |
|
228 | 228 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
229 | 229 | 'repository.admin') |
|
230 | 230 | def authors(self, repo_name, revision, f_path): |
|
231 | 231 | changeset = self.__get_cs(revision) |
|
232 | 232 | f_path = f_path |
|
233 | 233 | _file = changeset.get_node(f_path) |
|
234 | 234 | if _file.is_file(): |
|
235 | 235 | file_history, _hist = self._get_node_history(changeset, f_path) |
|
236 | 236 | c.authors = [] |
|
237 | 237 | for a in set([x.author for x in _hist]): |
|
238 | 238 | c.authors.append((h.email(a), h.person(a))) |
|
239 | 239 | return render('files/files_history_box.html') |
|
240 | 240 | |
|
241 | 241 | @LoginRequired() |
|
242 | 242 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
243 | 243 | 'repository.admin') |
|
244 | 244 | def rawfile(self, repo_name, revision, f_path): |
|
245 | 245 | cs = self.__get_cs(revision) |
|
246 | 246 | file_node = self.__get_filenode(cs, f_path) |
|
247 | 247 | |
|
248 | 248 | response.content_disposition = 'attachment; filename=%s' % \ |
|
249 | 249 | safe_str(f_path.split(Repository.url_sep())[-1]) |
|
250 | 250 | |
|
251 | 251 | response.content_type = file_node.mimetype |
|
252 | 252 | return file_node.content |
|
253 | 253 | |
|
254 | 254 | @LoginRequired() |
|
255 | 255 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
256 | 256 | 'repository.admin') |
|
257 | 257 | def raw(self, repo_name, revision, f_path): |
|
258 | 258 | cs = self.__get_cs(revision) |
|
259 | 259 | file_node = self.__get_filenode(cs, f_path) |
|
260 | 260 | |
|
261 | 261 | raw_mimetype_mapping = { |
|
262 | 262 | # map original mimetype to a mimetype used for "show as raw" |
|
263 | 263 | # you can also provide a content-disposition to override the |
|
264 | 264 | # default "attachment" disposition. |
|
265 | 265 | # orig_type: (new_type, new_dispo) |
|
266 | 266 | |
|
267 | 267 | # show images inline: |
|
268 | 268 | 'image/x-icon': ('image/x-icon', 'inline'), |
|
269 | 269 | 'image/png': ('image/png', 'inline'), |
|
270 | 270 | 'image/gif': ('image/gif', 'inline'), |
|
271 | 271 | 'image/jpeg': ('image/jpeg', 'inline'), |
|
272 | 272 | 'image/svg+xml': ('image/svg+xml', 'inline'), |
|
273 | 273 | } |
|
274 | 274 | |
|
275 | 275 | mimetype = file_node.mimetype |
|
276 | 276 | try: |
|
277 | 277 | mimetype, dispo = raw_mimetype_mapping[mimetype] |
|
278 | 278 | except KeyError: |
|
279 | 279 | # we don't know anything special about this, handle it safely |
|
280 | 280 | if file_node.is_binary: |
|
281 | 281 | # do same as download raw for binary files |
|
282 | 282 | mimetype, dispo = 'application/octet-stream', 'attachment' |
|
283 | 283 | else: |
|
284 | 284 | # do not just use the original mimetype, but force text/plain, |
|
285 | 285 | # otherwise it would serve text/html and that might be unsafe. |
|
286 | 286 | # Note: underlying vcs library fakes text/plain mimetype if the |
|
287 | 287 | # mimetype can not be determined and it thinks it is not |
|
288 | 288 | # binary.This might lead to erroneous text display in some |
|
289 | 289 | # cases, but helps in other cases, like with text files |
|
290 | 290 | # without extension. |
|
291 | 291 | mimetype, dispo = 'text/plain', 'inline' |
|
292 | 292 | |
|
293 | 293 | if dispo == 'attachment': |
|
294 | 294 | dispo = 'attachment; filename=%s' % \ |
|
295 | 295 | safe_str(f_path.split(os.sep)[-1]) |
|
296 | 296 | |
|
297 | 297 | response.content_disposition = dispo |
|
298 | 298 | response.content_type = mimetype |
|
299 | 299 | return file_node.content |
|
300 | 300 | |
|
301 | 301 | @LoginRequired() |
|
302 | 302 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
303 | 303 | def delete(self, repo_name, revision, f_path): |
|
304 | 304 | repo = c.db_repo |
|
305 | 305 | if repo.enable_locking and repo.locked[0]: |
|
306 | 306 | h.flash(_('This repository has been locked by %s on %s') |
|
307 | 307 | % (h.person_by_id(repo.locked[0]), |
|
308 | 308 | h.fmt_date(h.time_to_datetime(repo.locked[1]))), |
|
309 | 309 | 'warning') |
|
310 | 310 | return redirect(h.url('files_home', |
|
311 | 311 | repo_name=repo_name, revision='tip')) |
|
312 | 312 | |
|
313 | 313 | # check if revision is a branch identifier- basically we cannot |
|
314 | 314 | # create multiple heads via file editing |
|
315 | 315 | _branches = repo.scm_instance.branches |
|
316 | 316 | # check if revision is a branch name or branch hash |
|
317 | 317 | if revision not in _branches.keys() + _branches.values(): |
|
318 | 318 | h.flash(_('You can only delete files with revision ' |
|
319 | 319 | 'being a valid branch '), category='warning') |
|
320 | 320 | return redirect(h.url('files_home', |
|
321 | 321 | repo_name=repo_name, revision='tip', |
|
322 | 322 | f_path=f_path)) |
|
323 | 323 | |
|
324 | 324 | r_post = request.POST |
|
325 | 325 | |
|
326 | 326 | c.cs = self.__get_cs(revision) |
|
327 | 327 | c.file = self.__get_filenode(c.cs, f_path) |
|
328 | 328 | |
|
329 | 329 | c.default_message = _('Deleted file %s via Kallithea') % (f_path) |
|
330 | 330 | c.f_path = f_path |
|
331 | 331 | node_path = f_path |
|
332 | 332 | author = self.authuser.full_contact |
|
333 | 333 | |
|
334 | 334 | if r_post: |
|
335 | 335 | message = r_post.get('message') or c.default_message |
|
336 | 336 | |
|
337 | 337 | try: |
|
338 | 338 | nodes = { |
|
339 | 339 | node_path: { |
|
340 | 340 | 'content': '' |
|
341 | 341 | } |
|
342 | 342 | } |
|
343 | 343 | self.scm_model.delete_nodes( |
|
344 | 344 | user=c.authuser.user_id, repo=c.db_repo, |
|
345 | 345 | message=message, |
|
346 | 346 | nodes=nodes, |
|
347 | 347 | parent_cs=c.cs, |
|
348 | 348 | author=author, |
|
349 | 349 | ) |
|
350 | 350 | |
|
351 | 351 | h.flash(_('Successfully deleted file %s') % f_path, |
|
352 | 352 | category='success') |
|
353 | 353 | except Exception: |
|
354 | 354 | log.error(traceback.format_exc()) |
|
355 | 355 | h.flash(_('Error occurred during commit'), category='error') |
|
356 | 356 | return redirect(url('changeset_home', |
|
357 | 357 | repo_name=c.repo_name, revision='tip')) |
|
358 | 358 | |
|
359 | 359 | return render('files/files_delete.html') |
|
360 | 360 | |
|
361 | 361 | @LoginRequired() |
|
362 | 362 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
363 | 363 | def edit(self, repo_name, revision, f_path): |
|
364 | 364 | repo = c.db_repo |
|
365 | 365 | if repo.enable_locking and repo.locked[0]: |
|
366 | 366 | h.flash(_('This repository has been locked by %s on %s') |
|
367 | 367 | % (h.person_by_id(repo.locked[0]), |
|
368 | 368 | h.fmt_date(h.time_to_datetime(repo.locked[1]))), |
|
369 | 369 | 'warning') |
|
370 | 370 | return redirect(h.url('files_home', |
|
371 | 371 | repo_name=repo_name, revision='tip')) |
|
372 | 372 | |
|
373 | 373 | # check if revision is a branch identifier- basically we cannot |
|
374 | 374 | # create multiple heads via file editing |
|
375 | 375 | _branches = repo.scm_instance.branches |
|
376 | 376 | # check if revision is a branch name or branch hash |
|
377 | 377 | if revision not in _branches.keys() + _branches.values(): |
|
378 | 378 | h.flash(_('You can only edit files with revision ' |
|
379 | 379 | 'being a valid branch '), category='warning') |
|
380 | 380 | return redirect(h.url('files_home', |
|
381 | 381 | repo_name=repo_name, revision='tip', |
|
382 | 382 | f_path=f_path)) |
|
383 | 383 | |
|
384 | 384 | r_post = request.POST |
|
385 | 385 | |
|
386 | 386 | c.cs = self.__get_cs(revision) |
|
387 | 387 | c.file = self.__get_filenode(c.cs, f_path) |
|
388 | 388 | |
|
389 | 389 | if c.file.is_binary: |
|
390 | 390 | return redirect(url('files_home', repo_name=c.repo_name, |
|
391 | 391 | revision=c.cs.raw_id, f_path=f_path)) |
|
392 | 392 | c.default_message = _('Edited file %s via Kallithea') % (f_path) |
|
393 | 393 | c.f_path = f_path |
|
394 | 394 | |
|
395 | 395 | if r_post: |
|
396 | 396 | |
|
397 | 397 | old_content = c.file.content |
|
398 | 398 | sl = old_content.splitlines(1) |
|
399 | 399 | first_line = sl[0] if sl else '' |
|
400 | 400 | # modes: 0 - Unix, 1 - Mac, 2 - DOS |
|
401 | 401 | mode = detect_mode(first_line, 0) |
|
402 | 402 | content = convert_line_endings(r_post.get('content', ''), mode) |
|
403 | 403 | |
|
404 | 404 | message = r_post.get('message') or c.default_message |
|
405 | 405 | author = self.authuser.full_contact |
|
406 | 406 | |
|
407 | 407 | if content == old_content: |
|
408 | 408 | h.flash(_('No changes'), category='warning') |
|
409 | 409 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
410 | 410 | revision='tip')) |
|
411 | 411 | try: |
|
412 | 412 | self.scm_model.commit_change(repo=c.db_repo_scm_instance, |
|
413 | 413 | repo_name=repo_name, cs=c.cs, |
|
414 | 414 | user=self.authuser.user_id, |
|
415 | 415 | author=author, message=message, |
|
416 | 416 | content=content, f_path=f_path) |
|
417 | 417 | h.flash(_('Successfully committed to %s') % f_path, |
|
418 | 418 | category='success') |
|
419 | 419 | except Exception: |
|
420 | 420 | log.error(traceback.format_exc()) |
|
421 | 421 | h.flash(_('Error occurred during commit'), category='error') |
|
422 | 422 | return redirect(url('changeset_home', |
|
423 | 423 | repo_name=c.repo_name, revision='tip')) |
|
424 | 424 | |
|
425 | 425 | return render('files/files_edit.html') |
|
426 | 426 | |
|
427 | 427 | @LoginRequired() |
|
428 | 428 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
429 | 429 | def add(self, repo_name, revision, f_path): |
|
430 | 430 | |
|
431 | 431 | repo = Repository.get_by_repo_name(repo_name) |
|
432 | 432 | if repo.enable_locking and repo.locked[0]: |
|
433 | 433 | h.flash(_('This repository has been locked by %s on %s') |
|
434 | 434 | % (h.person_by_id(repo.locked[0]), |
|
435 | 435 | h.fmt_date(h.time_to_datetime(repo.locked[1]))), |
|
436 | 436 | 'warning') |
|
437 | 437 | return redirect(h.url('files_home', |
|
438 | 438 | repo_name=repo_name, revision='tip')) |
|
439 | 439 | |
|
440 | 440 | r_post = request.POST |
|
441 | 441 | c.cs = self.__get_cs(revision, silent_empty=True) |
|
442 | 442 | if c.cs is None: |
|
443 | 443 | c.cs = EmptyChangeset(alias=c.db_repo_scm_instance.alias) |
|
444 | 444 | c.default_message = (_('Added file via Kallithea')) |
|
445 | 445 | c.f_path = f_path |
|
446 | 446 | |
|
447 | 447 | if r_post: |
|
448 | 448 | unix_mode = 0 |
|
449 | 449 | content = convert_line_endings(r_post.get('content', ''), unix_mode) |
|
450 | 450 | |
|
451 | 451 | message = r_post.get('message') or c.default_message |
|
452 | 452 | filename = r_post.get('filename') |
|
453 | 453 | location = r_post.get('location', '') |
|
454 | 454 | file_obj = r_post.get('upload_file', None) |
|
455 | 455 | |
|
456 | 456 | if file_obj is not None and hasattr(file_obj, 'filename'): |
|
457 | 457 | filename = file_obj.filename |
|
458 | 458 | content = file_obj.file |
|
459 | 459 | |
|
460 | 460 | if hasattr(content, 'file'): |
|
461 | 461 | # non posix systems store real file under file attr |
|
462 | 462 | content = content.file |
|
463 | 463 | |
|
464 | 464 | if not content: |
|
465 | 465 | h.flash(_('No content'), category='warning') |
|
466 | 466 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
467 | 467 | revision='tip')) |
|
468 | 468 | if not filename: |
|
469 | 469 | h.flash(_('No filename'), category='warning') |
|
470 | 470 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
471 | 471 | revision='tip')) |
|
472 | 472 | #strip all crap out of file, just leave the basename |
|
473 | 473 | filename = os.path.basename(filename) |
|
474 | 474 | node_path = os.path.join(location, filename) |
|
475 | 475 | author = self.authuser.full_contact |
|
476 | 476 | |
|
477 | 477 | try: |
|
478 | 478 | nodes = { |
|
479 | 479 | node_path: { |
|
480 | 480 | 'content': content |
|
481 | 481 | } |
|
482 | 482 | } |
|
483 | 483 | self.scm_model.create_nodes( |
|
484 | 484 | user=c.authuser.user_id, repo=c.db_repo, |
|
485 | 485 | message=message, |
|
486 | 486 | nodes=nodes, |
|
487 | 487 | parent_cs=c.cs, |
|
488 | 488 | author=author, |
|
489 | 489 | ) |
|
490 | 490 | |
|
491 | 491 | h.flash(_('Successfully committed to %s') % node_path, |
|
492 | 492 | category='success') |
|
493 | 493 | except NonRelativePathError as e: |
|
494 | 494 | h.flash(_('Location must be relative path and must not ' |
|
495 | 495 | 'contain .. in path'), category='warning') |
|
496 | 496 | return redirect(url('changeset_home', repo_name=c.repo_name, |
|
497 | 497 | revision='tip')) |
|
498 | 498 | except (NodeError, NodeAlreadyExistsError) as e: |
|
499 | 499 | h.flash(_(e), category='error') |
|
500 | 500 | except Exception: |
|
501 | 501 | log.error(traceback.format_exc()) |
|
502 | 502 | h.flash(_('Error occurred during commit'), category='error') |
|
503 | 503 | return redirect(url('changeset_home', |
|
504 | 504 | repo_name=c.repo_name, revision='tip')) |
|
505 | 505 | |
|
506 | 506 | return render('files/files_add.html') |
|
507 | 507 | |
|
508 | 508 | @LoginRequired() |
|
509 | 509 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
510 | 510 | 'repository.admin') |
|
511 | 511 | def archivefile(self, repo_name, fname): |
|
512 | 512 | fileformat = None |
|
513 | 513 | revision = None |
|
514 | 514 | ext = None |
|
515 | 515 | subrepos = request.GET.get('subrepos') == 'true' |
|
516 | 516 | |
|
517 | 517 | for a_type, ext_data in settings.ARCHIVE_SPECS.items(): |
|
518 | 518 | archive_spec = fname.split(ext_data[1]) |
|
519 | 519 | if len(archive_spec) == 2 and archive_spec[1] == '': |
|
520 | 520 | fileformat = a_type or ext_data[1] |
|
521 | 521 | revision = archive_spec[0] |
|
522 | 522 | ext = ext_data[1] |
|
523 | 523 | |
|
524 | 524 | try: |
|
525 | 525 | dbrepo = RepoModel().get_by_repo_name(repo_name) |
|
526 | 526 | if not dbrepo.enable_downloads: |
|
527 | 527 | return _('Downloads disabled') # TODO: do something else? |
|
528 | 528 | |
|
529 | 529 | if c.db_repo_scm_instance.alias == 'hg': |
|
530 | 530 | # patch and reset hooks section of UI config to not run any |
|
531 | 531 | # hooks on fetching archives with subrepos |
|
532 | 532 | for k, v in c.db_repo_scm_instance._repo.ui.configitems('hooks'): |
|
533 | 533 | c.db_repo_scm_instance._repo.ui.setconfig('hooks', k, None) |
|
534 | 534 | |
|
535 | 535 | cs = c.db_repo_scm_instance.get_changeset(revision) |
|
536 | 536 | content_type = settings.ARCHIVE_SPECS[fileformat][0] |
|
537 | 537 | except ChangesetDoesNotExistError: |
|
538 | 538 | return _('Unknown revision %s') % revision |
|
539 | 539 | except EmptyRepositoryError: |
|
540 | 540 | return _('Empty repository') |
|
541 | 541 | except (ImproperArchiveTypeError, KeyError): |
|
542 | 542 | return _('Unknown archive type') |
|
543 | 543 | |
|
544 | 544 | from kallithea import CONFIG |
|
545 | 545 | rev_name = cs.raw_id[:12] |
|
546 | 546 | archive_name = '%s-%s%s' % (safe_str(repo_name.replace('/', '_')), |
|
547 | 547 | safe_str(rev_name), ext) |
|
548 | 548 | |
|
549 | 549 | archive_path = None |
|
550 | 550 | cached_archive_path = None |
|
551 | 551 | archive_cache_dir = CONFIG.get('archive_cache_dir') |
|
552 | 552 | if archive_cache_dir and not subrepos: # TOOD: subrepo caching? |
|
553 | 553 | if not os.path.isdir(archive_cache_dir): |
|
554 | 554 | os.makedirs(archive_cache_dir) |
|
555 | 555 | cached_archive_path = os.path.join(archive_cache_dir, archive_name) |
|
556 | 556 | if os.path.isfile(cached_archive_path): |
|
557 |
log.debug('Found cached archive in %s' |
|
|
557 | log.debug('Found cached archive in %s', cached_archive_path) | |
|
558 | 558 | archive_path = cached_archive_path |
|
559 | 559 | else: |
|
560 |
log.debug('Archive %s is not yet cached' |
|
|
560 | log.debug('Archive %s is not yet cached', archive_name) | |
|
561 | 561 | |
|
562 | 562 | if archive_path is None: |
|
563 | 563 | # generate new archive |
|
564 | 564 | fd, archive_path = tempfile.mkstemp() |
|
565 |
log.debug('Creating new temp archive in %s' |
|
|
565 | log.debug('Creating new temp archive in %s', archive_path) | |
|
566 | 566 | with os.fdopen(fd, 'wb') as stream: |
|
567 | 567 | cs.fill_archive(stream=stream, kind=fileformat, subrepos=subrepos) |
|
568 | 568 | # stream (and thus fd) has been closed by cs.fill_archive |
|
569 | 569 | if cached_archive_path is not None: |
|
570 | 570 | # we generated the archive - move it to cache |
|
571 |
log.debug('Storing new archive in %s' |
|
|
571 | log.debug('Storing new archive in %s', cached_archive_path) | |
|
572 | 572 | shutil.move(archive_path, cached_archive_path) |
|
573 | 573 | archive_path = cached_archive_path |
|
574 | 574 | |
|
575 | 575 | def get_chunked_archive(archive_path): |
|
576 | 576 | stream = open(archive_path, 'rb') |
|
577 | 577 | while True: |
|
578 | 578 | data = stream.read(16 * 1024) |
|
579 | 579 | if not data: |
|
580 | 580 | break |
|
581 | 581 | yield data |
|
582 | 582 | stream.close() |
|
583 | 583 | if archive_path != cached_archive_path: |
|
584 |
log.debug('Destroying temp archive %s' |
|
|
584 | log.debug('Destroying temp archive %s', archive_path) | |
|
585 | 585 | os.remove(archive_path) |
|
586 | 586 | |
|
587 | 587 | action_logger(user=c.authuser, |
|
588 | 588 | action='user_downloaded_archive:%s' % (archive_name), |
|
589 | 589 | repo=repo_name, ipaddr=self.ip_addr, commit=True) |
|
590 | 590 | |
|
591 | 591 | response.content_disposition = str('attachment; filename=%s' % (archive_name)) |
|
592 | 592 | response.content_type = str(content_type) |
|
593 | 593 | return get_chunked_archive(archive_path) |
|
594 | 594 | |
|
595 | 595 | @LoginRequired() |
|
596 | 596 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
597 | 597 | 'repository.admin') |
|
598 | 598 | def diff(self, repo_name, f_path): |
|
599 | 599 | ignore_whitespace = request.GET.get('ignorews') == '1' |
|
600 | 600 | line_context = request.GET.get('context', 3) |
|
601 | 601 | diff2 = request.GET.get('diff2', '') |
|
602 | 602 | diff1 = request.GET.get('diff1', '') or diff2 |
|
603 | 603 | c.action = request.GET.get('diff') |
|
604 | 604 | c.no_changes = diff1 == diff2 |
|
605 | 605 | c.f_path = f_path |
|
606 | 606 | c.big_diff = False |
|
607 | 607 | c.anchor_url = anchor_url |
|
608 | 608 | c.ignorews_url = _ignorews_url |
|
609 | 609 | c.context_url = _context_url |
|
610 | 610 | c.changes = OrderedDict() |
|
611 | 611 | c.changes[diff2] = [] |
|
612 | 612 | |
|
613 | 613 | #special case if we want a show rev only, it's impl here |
|
614 | 614 | #to reduce JS and callbacks |
|
615 | 615 | |
|
616 | 616 | if request.GET.get('show_rev'): |
|
617 | 617 | if str2bool(request.GET.get('annotate', 'False')): |
|
618 | 618 | _url = url('files_annotate_home', repo_name=c.repo_name, |
|
619 | 619 | revision=diff1, f_path=c.f_path) |
|
620 | 620 | else: |
|
621 | 621 | _url = url('files_home', repo_name=c.repo_name, |
|
622 | 622 | revision=diff1, f_path=c.f_path) |
|
623 | 623 | |
|
624 | 624 | return redirect(_url) |
|
625 | 625 | try: |
|
626 | 626 | if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]: |
|
627 | 627 | c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1) |
|
628 | 628 | try: |
|
629 | 629 | node1 = c.changeset_1.get_node(f_path) |
|
630 | 630 | if node1.is_dir(): |
|
631 | 631 | raise NodeError('%s path is a %s not a file' |
|
632 | 632 | % (node1, type(node1))) |
|
633 | 633 | except NodeDoesNotExistError: |
|
634 | 634 | c.changeset_1 = EmptyChangeset(cs=diff1, |
|
635 | 635 | revision=c.changeset_1.revision, |
|
636 | 636 | repo=c.db_repo_scm_instance) |
|
637 | 637 | node1 = FileNode(f_path, '', changeset=c.changeset_1) |
|
638 | 638 | else: |
|
639 | 639 | c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance) |
|
640 | 640 | node1 = FileNode(f_path, '', changeset=c.changeset_1) |
|
641 | 641 | |
|
642 | 642 | if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]: |
|
643 | 643 | c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2) |
|
644 | 644 | try: |
|
645 | 645 | node2 = c.changeset_2.get_node(f_path) |
|
646 | 646 | if node2.is_dir(): |
|
647 | 647 | raise NodeError('%s path is a %s not a file' |
|
648 | 648 | % (node2, type(node2))) |
|
649 | 649 | except NodeDoesNotExistError: |
|
650 | 650 | c.changeset_2 = EmptyChangeset(cs=diff2, |
|
651 | 651 | revision=c.changeset_2.revision, |
|
652 | 652 | repo=c.db_repo_scm_instance) |
|
653 | 653 | node2 = FileNode(f_path, '', changeset=c.changeset_2) |
|
654 | 654 | else: |
|
655 | 655 | c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance) |
|
656 | 656 | node2 = FileNode(f_path, '', changeset=c.changeset_2) |
|
657 | 657 | except (RepositoryError, NodeError): |
|
658 | 658 | log.error(traceback.format_exc()) |
|
659 | 659 | return redirect(url('files_home', repo_name=c.repo_name, |
|
660 | 660 | f_path=f_path)) |
|
661 | 661 | |
|
662 | 662 | if c.action == 'download': |
|
663 | 663 | _diff = diffs.get_gitdiff(node1, node2, |
|
664 | 664 | ignore_whitespace=ignore_whitespace, |
|
665 | 665 | context=line_context) |
|
666 | 666 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
667 | 667 | |
|
668 | 668 | diff_name = '%s_vs_%s.diff' % (diff1, diff2) |
|
669 | 669 | response.content_type = 'text/plain' |
|
670 | 670 | response.content_disposition = ( |
|
671 | 671 | 'attachment; filename=%s' % diff_name |
|
672 | 672 | ) |
|
673 | 673 | return diff.as_raw() |
|
674 | 674 | |
|
675 | 675 | elif c.action == 'raw': |
|
676 | 676 | _diff = diffs.get_gitdiff(node1, node2, |
|
677 | 677 | ignore_whitespace=ignore_whitespace, |
|
678 | 678 | context=line_context) |
|
679 | 679 | diff = diffs.DiffProcessor(_diff, format='gitdiff') |
|
680 | 680 | response.content_type = 'text/plain' |
|
681 | 681 | return diff.as_raw() |
|
682 | 682 | |
|
683 | 683 | else: |
|
684 | 684 | fid = h.FID(diff2, node2.path) |
|
685 | 685 | line_context_lcl = get_line_ctx(fid, request.GET) |
|
686 | 686 | ign_whitespace_lcl = get_ignore_ws(fid, request.GET) |
|
687 | 687 | |
|
688 | 688 | lim = request.GET.get('fulldiff') or self.cut_off_limit |
|
689 | 689 | _, cs1, cs2, diff, st = diffs.wrapped_diff(filenode_old=node1, |
|
690 | 690 | filenode_new=node2, |
|
691 | 691 | cut_off_limit=lim, |
|
692 | 692 | ignore_whitespace=ign_whitespace_lcl, |
|
693 | 693 | line_context=line_context_lcl, |
|
694 | 694 | enable_comments=False) |
|
695 | 695 | op = '' |
|
696 | 696 | filename = node1.path |
|
697 | 697 | cs_changes = { |
|
698 | 698 | 'fid': [cs1, cs2, op, filename, diff, st] |
|
699 | 699 | } |
|
700 | 700 | c.changes = cs_changes |
|
701 | 701 | |
|
702 | 702 | return render('files/file_diff.html') |
|
703 | 703 | |
|
704 | 704 | @LoginRequired() |
|
705 | 705 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
706 | 706 | 'repository.admin') |
|
707 | 707 | def diff_2way(self, repo_name, f_path): |
|
708 | 708 | diff1 = request.GET.get('diff1', '') |
|
709 | 709 | diff2 = request.GET.get('diff2', '') |
|
710 | 710 | try: |
|
711 | 711 | if diff1 not in ['', None, 'None', '0' * 12, '0' * 40]: |
|
712 | 712 | c.changeset_1 = c.db_repo_scm_instance.get_changeset(diff1) |
|
713 | 713 | try: |
|
714 | 714 | node1 = c.changeset_1.get_node(f_path) |
|
715 | 715 | if node1.is_dir(): |
|
716 | 716 | raise NodeError('%s path is a %s not a file' |
|
717 | 717 | % (node1, type(node1))) |
|
718 | 718 | except NodeDoesNotExistError: |
|
719 | 719 | c.changeset_1 = EmptyChangeset(cs=diff1, |
|
720 | 720 | revision=c.changeset_1.revision, |
|
721 | 721 | repo=c.db_repo_scm_instance) |
|
722 | 722 | node1 = FileNode(f_path, '', changeset=c.changeset_1) |
|
723 | 723 | else: |
|
724 | 724 | c.changeset_1 = EmptyChangeset(repo=c.db_repo_scm_instance) |
|
725 | 725 | node1 = FileNode(f_path, '', changeset=c.changeset_1) |
|
726 | 726 | |
|
727 | 727 | if diff2 not in ['', None, 'None', '0' * 12, '0' * 40]: |
|
728 | 728 | c.changeset_2 = c.db_repo_scm_instance.get_changeset(diff2) |
|
729 | 729 | try: |
|
730 | 730 | node2 = c.changeset_2.get_node(f_path) |
|
731 | 731 | if node2.is_dir(): |
|
732 | 732 | raise NodeError('%s path is a %s not a file' |
|
733 | 733 | % (node2, type(node2))) |
|
734 | 734 | except NodeDoesNotExistError: |
|
735 | 735 | c.changeset_2 = EmptyChangeset(cs=diff2, |
|
736 | 736 | revision=c.changeset_2.revision, |
|
737 | 737 | repo=c.db_repo_scm_instance) |
|
738 | 738 | node2 = FileNode(f_path, '', changeset=c.changeset_2) |
|
739 | 739 | else: |
|
740 | 740 | c.changeset_2 = EmptyChangeset(repo=c.db_repo_scm_instance) |
|
741 | 741 | node2 = FileNode(f_path, '', changeset=c.changeset_2) |
|
742 | 742 | except ChangesetDoesNotExistError as e: |
|
743 | 743 | msg = _('Such revision does not exist for this repository') |
|
744 | 744 | h.flash(msg, category='error') |
|
745 | 745 | raise HTTPNotFound() |
|
746 | 746 | c.node1 = node1 |
|
747 | 747 | c.node2 = node2 |
|
748 | 748 | c.cs1 = c.changeset_1 |
|
749 | 749 | c.cs2 = c.changeset_2 |
|
750 | 750 | |
|
751 | 751 | return render('files/diff_2way.html') |
|
752 | 752 | |
|
753 | 753 | def _get_node_history(self, cs, f_path, changesets=None): |
|
754 | 754 | """ |
|
755 | 755 | get changesets history for given node |
|
756 | 756 | |
|
757 | 757 | :param cs: changeset to calculate history |
|
758 | 758 | :param f_path: path for node to calculate history for |
|
759 | 759 | :param changesets: if passed don't calculate history and take |
|
760 | 760 | changesets defined in this list |
|
761 | 761 | """ |
|
762 | 762 | # calculate history based on tip |
|
763 | 763 | tip_cs = c.db_repo_scm_instance.get_changeset() |
|
764 | 764 | if changesets is None: |
|
765 | 765 | try: |
|
766 | 766 | changesets = tip_cs.get_file_history(f_path) |
|
767 | 767 | except (NodeDoesNotExistError, ChangesetError): |
|
768 | 768 | #this node is not present at tip ! |
|
769 | 769 | changesets = cs.get_file_history(f_path) |
|
770 | 770 | hist_l = [] |
|
771 | 771 | |
|
772 | 772 | changesets_group = ([], _("Changesets")) |
|
773 | 773 | branches_group = ([], _("Branches")) |
|
774 | 774 | tags_group = ([], _("Tags")) |
|
775 | 775 | for chs in changesets: |
|
776 | 776 | #_branch = '(%s)' % chs.branch if (cs.repository.alias == 'hg') else '' |
|
777 | 777 | _branch = chs.branch |
|
778 | 778 | n_desc = '%s (%s)' % (h.show_id(chs), _branch) |
|
779 | 779 | changesets_group[0].append((chs.raw_id, n_desc,)) |
|
780 | 780 | hist_l.append(changesets_group) |
|
781 | 781 | |
|
782 | 782 | for name, chs in c.db_repo_scm_instance.branches.items(): |
|
783 | 783 | branches_group[0].append((chs, name),) |
|
784 | 784 | hist_l.append(branches_group) |
|
785 | 785 | |
|
786 | 786 | for name, chs in c.db_repo_scm_instance.tags.items(): |
|
787 | 787 | tags_group[0].append((chs, name),) |
|
788 | 788 | hist_l.append(tags_group) |
|
789 | 789 | |
|
790 | 790 | return hist_l, changesets |
|
791 | 791 | |
|
792 | 792 | @LoginRequired() |
|
793 | 793 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
794 | 794 | 'repository.admin') |
|
795 | 795 | @jsonify |
|
796 | 796 | def nodelist(self, repo_name, revision, f_path): |
|
797 | 797 | if request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
798 | 798 | cs = self.__get_cs(revision) |
|
799 | 799 | _d, _f = ScmModel().get_nodes(repo_name, cs.raw_id, f_path, |
|
800 | 800 | flat=False) |
|
801 | 801 | return {'nodes': _d + _f} |
@@ -1,242 +1,242 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.login |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Login controller for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 22, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | import logging |
|
30 | 30 | import formencode |
|
31 | 31 | import urlparse |
|
32 | 32 | |
|
33 | 33 | from formencode import htmlfill |
|
34 | 34 | from webob.exc import HTTPFound |
|
35 | 35 | from pylons.i18n.translation import _ |
|
36 | 36 | from pylons.controllers.util import redirect |
|
37 | 37 | from pylons import request, session, tmpl_context as c, url |
|
38 | 38 | |
|
39 | 39 | import kallithea.lib.helpers as h |
|
40 | 40 | from kallithea.lib.auth import AuthUser, HasPermissionAnyDecorator |
|
41 | 41 | from kallithea.lib.base import BaseController, log_in_user, render |
|
42 | 42 | from kallithea.lib.exceptions import UserCreationError |
|
43 | 43 | from kallithea.lib.utils2 import safe_str |
|
44 | 44 | from kallithea.model.db import User, Setting |
|
45 | 45 | from kallithea.model.forms import LoginForm, RegisterForm, PasswordResetForm |
|
46 | 46 | from kallithea.model.user import UserModel |
|
47 | 47 | from kallithea.model.meta import Session |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | class LoginController(BaseController): |
|
54 | 54 | |
|
55 | 55 | def __before__(self): |
|
56 | 56 | super(LoginController, self).__before__() |
|
57 | 57 | |
|
58 | 58 | def _validate_came_from(self, came_from): |
|
59 | 59 | """Return True if came_from is valid and can and should be used""" |
|
60 | 60 | if not came_from: |
|
61 | 61 | return False |
|
62 | 62 | |
|
63 | 63 | parsed = urlparse.urlparse(came_from) |
|
64 | 64 | server_parsed = urlparse.urlparse(url.current()) |
|
65 | 65 | allowed_schemes = ['http', 'https'] |
|
66 | 66 | if parsed.scheme and parsed.scheme not in allowed_schemes: |
|
67 |
log.error('Suspicious URL scheme detected %s for url %s' |
|
|
68 |
|
|
|
67 | log.error('Suspicious URL scheme detected %s for url %s', | |
|
68 | parsed.scheme, parsed) | |
|
69 | 69 | return False |
|
70 | 70 | if server_parsed.netloc != parsed.netloc: |
|
71 | 71 | log.error('Suspicious NETLOC detected %s for url %s server url ' |
|
72 | 72 | 'is: %s' % (parsed.netloc, parsed, server_parsed)) |
|
73 | 73 | return False |
|
74 | 74 | return True |
|
75 | 75 | |
|
76 | 76 | def _redirect_to_origin(self, origin): |
|
77 | 77 | '''redirect to the original page, preserving any get arguments given''' |
|
78 | 78 | request.GET.pop('came_from', None) |
|
79 | 79 | raise HTTPFound(location=url(origin, **request.GET)) |
|
80 | 80 | |
|
81 | 81 | def index(self): |
|
82 | 82 | c.came_from = safe_str(request.GET.get('came_from', '')) |
|
83 | 83 | if not self._validate_came_from(c.came_from): |
|
84 | 84 | c.came_from = url('home') |
|
85 | 85 | |
|
86 | 86 | not_default = self.authuser.username != User.DEFAULT_USER |
|
87 | 87 | ip_allowed = AuthUser.check_ip_allowed(self.authuser, self.ip_addr) |
|
88 | 88 | |
|
89 | 89 | # redirect if already logged in |
|
90 | 90 | if self.authuser.is_authenticated and not_default and ip_allowed: |
|
91 | 91 | return self._redirect_to_origin(c.came_from) |
|
92 | 92 | |
|
93 | 93 | if request.POST: |
|
94 | 94 | # import Login Form validator class |
|
95 | 95 | login_form = LoginForm() |
|
96 | 96 | try: |
|
97 | 97 | c.form_result = login_form.to_python(dict(request.POST)) |
|
98 | 98 | # form checks for username/password, now we're authenticated |
|
99 | 99 | username = c.form_result['username'] |
|
100 | 100 | user = User.get_by_username(username, case_insensitive=True) |
|
101 | 101 | except formencode.Invalid as errors: |
|
102 | 102 | defaults = errors.value |
|
103 | 103 | # remove password from filling in form again |
|
104 | 104 | del defaults['password'] |
|
105 | 105 | return htmlfill.render( |
|
106 | 106 | render('/login.html'), |
|
107 | 107 | defaults=errors.value, |
|
108 | 108 | errors=errors.error_dict or {}, |
|
109 | 109 | prefix_error=False, |
|
110 | 110 | encoding="UTF-8", |
|
111 | 111 | force_defaults=False) |
|
112 | 112 | except UserCreationError as e: |
|
113 | 113 | # container auth or other auth functions that create users on |
|
114 | 114 | # the fly can throw this exception signaling that there's issue |
|
115 | 115 | # with user creation, explanation should be provided in |
|
116 | 116 | # Exception itself |
|
117 | 117 | h.flash(e, 'error') |
|
118 | 118 | else: |
|
119 | 119 | log_in_user(user, c.form_result['remember'], |
|
120 | 120 | is_external_auth=False) |
|
121 | 121 | return self._redirect_to_origin(c.came_from) |
|
122 | 122 | |
|
123 | 123 | return render('/login.html') |
|
124 | 124 | |
|
125 | 125 | @HasPermissionAnyDecorator('hg.admin', 'hg.register.auto_activate', |
|
126 | 126 | 'hg.register.manual_activate') |
|
127 | 127 | def register(self): |
|
128 | 128 | c.auto_active = 'hg.register.auto_activate' in User.get_default_user()\ |
|
129 | 129 | .AuthUser.permissions['global'] |
|
130 | 130 | |
|
131 | 131 | settings = Setting.get_app_settings() |
|
132 | 132 | captcha_private_key = settings.get('captcha_private_key') |
|
133 | 133 | c.captcha_active = bool(captcha_private_key) |
|
134 | 134 | c.captcha_public_key = settings.get('captcha_public_key') |
|
135 | 135 | |
|
136 | 136 | if request.POST: |
|
137 | 137 | register_form = RegisterForm()() |
|
138 | 138 | try: |
|
139 | 139 | form_result = register_form.to_python(dict(request.POST)) |
|
140 | 140 | form_result['active'] = c.auto_active |
|
141 | 141 | |
|
142 | 142 | if c.captcha_active: |
|
143 | 143 | from kallithea.lib.recaptcha import submit |
|
144 | 144 | response = submit(request.POST.get('recaptcha_challenge_field'), |
|
145 | 145 | request.POST.get('recaptcha_response_field'), |
|
146 | 146 | private_key=captcha_private_key, |
|
147 | 147 | remoteip=self.ip_addr) |
|
148 | 148 | if c.captcha_active and not response.is_valid: |
|
149 | 149 | _value = form_result |
|
150 | 150 | _msg = _('Bad captcha') |
|
151 | 151 | error_dict = {'recaptcha_field': _msg} |
|
152 | 152 | raise formencode.Invalid(_msg, _value, None, |
|
153 | 153 | error_dict=error_dict) |
|
154 | 154 | |
|
155 | 155 | UserModel().create_registration(form_result) |
|
156 | 156 | h.flash(_('You have successfully registered into Kallithea'), |
|
157 | 157 | category='success') |
|
158 | 158 | Session().commit() |
|
159 | 159 | return redirect(url('login_home')) |
|
160 | 160 | |
|
161 | 161 | except formencode.Invalid as errors: |
|
162 | 162 | return htmlfill.render( |
|
163 | 163 | render('/register.html'), |
|
164 | 164 | defaults=errors.value, |
|
165 | 165 | errors=errors.error_dict or {}, |
|
166 | 166 | prefix_error=False, |
|
167 | 167 | encoding="UTF-8", |
|
168 | 168 | force_defaults=False) |
|
169 | 169 | except UserCreationError as e: |
|
170 | 170 | # container auth or other auth functions that create users on |
|
171 | 171 | # the fly can throw this exception signaling that there's issue |
|
172 | 172 | # with user creation, explanation should be provided in |
|
173 | 173 | # Exception itself |
|
174 | 174 | h.flash(e, 'error') |
|
175 | 175 | |
|
176 | 176 | return render('/register.html') |
|
177 | 177 | |
|
178 | 178 | def password_reset(self): |
|
179 | 179 | settings = Setting.get_app_settings() |
|
180 | 180 | captcha_private_key = settings.get('captcha_private_key') |
|
181 | 181 | c.captcha_active = bool(captcha_private_key) |
|
182 | 182 | c.captcha_public_key = settings.get('captcha_public_key') |
|
183 | 183 | |
|
184 | 184 | if request.POST: |
|
185 | 185 | password_reset_form = PasswordResetForm()() |
|
186 | 186 | try: |
|
187 | 187 | form_result = password_reset_form.to_python(dict(request.POST)) |
|
188 | 188 | if c.captcha_active: |
|
189 | 189 | from kallithea.lib.recaptcha import submit |
|
190 | 190 | response = submit(request.POST.get('recaptcha_challenge_field'), |
|
191 | 191 | request.POST.get('recaptcha_response_field'), |
|
192 | 192 | private_key=captcha_private_key, |
|
193 | 193 | remoteip=self.ip_addr) |
|
194 | 194 | if c.captcha_active and not response.is_valid: |
|
195 | 195 | _value = form_result |
|
196 | 196 | _msg = _('Bad captcha') |
|
197 | 197 | error_dict = {'recaptcha_field': _msg} |
|
198 | 198 | raise formencode.Invalid(_msg, _value, None, |
|
199 | 199 | error_dict=error_dict) |
|
200 | 200 | UserModel().reset_password_link(form_result) |
|
201 | 201 | h.flash(_('Your password reset link was sent'), |
|
202 | 202 | category='success') |
|
203 | 203 | return redirect(url('login_home')) |
|
204 | 204 | |
|
205 | 205 | except formencode.Invalid as errors: |
|
206 | 206 | return htmlfill.render( |
|
207 | 207 | render('/password_reset.html'), |
|
208 | 208 | defaults=errors.value, |
|
209 | 209 | errors=errors.error_dict or {}, |
|
210 | 210 | prefix_error=False, |
|
211 | 211 | encoding="UTF-8", |
|
212 | 212 | force_defaults=False) |
|
213 | 213 | |
|
214 | 214 | return render('/password_reset.html') |
|
215 | 215 | |
|
216 | 216 | def password_reset_confirmation(self): |
|
217 | 217 | if request.GET and request.GET.get('key'): |
|
218 | 218 | try: |
|
219 | 219 | user = User.get_by_api_key(request.GET.get('key')) |
|
220 | 220 | data = dict(email=user.email) |
|
221 | 221 | UserModel().reset_password(data) |
|
222 | 222 | h.flash(_('Your password reset was successful, ' |
|
223 | 223 | 'new password has been sent to your email'), |
|
224 | 224 | category='success') |
|
225 | 225 | except Exception as e: |
|
226 | 226 | log.error(e) |
|
227 | 227 | return redirect(url('reset_password')) |
|
228 | 228 | |
|
229 | 229 | return redirect(url('login_home')) |
|
230 | 230 | |
|
231 | 231 | def logout(self): |
|
232 | 232 | session.delete() |
|
233 | 233 | log.info('Logging out and deleting session for user') |
|
234 | 234 | redirect(url('home')) |
|
235 | 235 | |
|
236 | 236 | def authentication_token(self): |
|
237 | 237 | """Return the CSRF protection token for the session - just like it |
|
238 | 238 | could have been screen scraped from a page with a form. |
|
239 | 239 | Only intended for testing but might also be useful for other kinds |
|
240 | 240 | of automation. |
|
241 | 241 | """ |
|
242 | 242 | return h.authentication_token() |
@@ -1,782 +1,782 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.pullrequests |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | pull requests controller for Kallithea for initializing pull requests |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: May 7, 2012 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import logging |
|
29 | 29 | import traceback |
|
30 | 30 | import formencode |
|
31 | 31 | import re |
|
32 | 32 | |
|
33 | 33 | from webob.exc import HTTPNotFound, HTTPForbidden, HTTPBadRequest |
|
34 | 34 | |
|
35 | 35 | from pylons import request, tmpl_context as c, url |
|
36 | 36 | from pylons.controllers.util import redirect |
|
37 | 37 | from pylons.i18n.translation import _ |
|
38 | 38 | |
|
39 | 39 | from kallithea.lib.vcs.utils.hgcompat import unionrepo |
|
40 | 40 | from kallithea.lib.compat import json |
|
41 | 41 | from kallithea.lib.base import BaseRepoController, render |
|
42 | 42 | from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator,\ |
|
43 | 43 | NotAnonymous |
|
44 | 44 | from kallithea.lib.helpers import Page |
|
45 | 45 | from kallithea.lib import helpers as h |
|
46 | 46 | from kallithea.lib import diffs |
|
47 | 47 | from kallithea.lib.exceptions import UserInvalidException |
|
48 | 48 | from kallithea.lib.utils import action_logger, jsonify |
|
49 | 49 | from kallithea.lib.vcs.utils import safe_str |
|
50 | 50 | from kallithea.lib.vcs.exceptions import EmptyRepositoryError |
|
51 | 51 | from kallithea.lib.diffs import LimitedDiffContainer |
|
52 | 52 | from kallithea.model.db import PullRequest, ChangesetStatus, ChangesetComment,\ |
|
53 | 53 | PullRequestReviewers |
|
54 | 54 | from kallithea.model.pull_request import PullRequestModel |
|
55 | 55 | from kallithea.model.meta import Session |
|
56 | 56 | from kallithea.model.repo import RepoModel |
|
57 | 57 | from kallithea.model.comment import ChangesetCommentsModel |
|
58 | 58 | from kallithea.model.changeset_status import ChangesetStatusModel |
|
59 | 59 | from kallithea.model.forms import PullRequestForm, PullRequestPostForm |
|
60 | 60 | from kallithea.lib.utils2 import safe_int |
|
61 | 61 | from kallithea.controllers.changeset import _ignorews_url,\ |
|
62 | 62 | _context_url, get_line_ctx, get_ignore_ws |
|
63 | 63 | from kallithea.controllers.compare import CompareController |
|
64 | 64 | from kallithea.lib.graphmod import graph_data |
|
65 | 65 | |
|
66 | 66 | log = logging.getLogger(__name__) |
|
67 | 67 | |
|
68 | 68 | |
|
69 | 69 | class PullrequestsController(BaseRepoController): |
|
70 | 70 | |
|
71 | 71 | def __before__(self): |
|
72 | 72 | super(PullrequestsController, self).__before__() |
|
73 | 73 | repo_model = RepoModel() |
|
74 | 74 | c.users_array = repo_model.get_users_js() |
|
75 | 75 | c.user_groups_array = repo_model.get_user_groups_js() |
|
76 | 76 | |
|
77 | 77 | def _get_repo_refs(self, repo, rev=None, branch=None, branch_rev=None): |
|
78 | 78 | """return a structure with repo's interesting changesets, suitable for |
|
79 | 79 | the selectors in pullrequest.html |
|
80 | 80 | |
|
81 | 81 | rev: a revision that must be in the list somehow and selected by default |
|
82 | 82 | branch: a branch that must be in the list and selected by default - even if closed |
|
83 | 83 | branch_rev: a revision of which peers should be preferred and available.""" |
|
84 | 84 | # list named branches that has been merged to this named branch - it should probably merge back |
|
85 | 85 | peers = [] |
|
86 | 86 | |
|
87 | 87 | if rev: |
|
88 | 88 | rev = safe_str(rev) |
|
89 | 89 | |
|
90 | 90 | if branch: |
|
91 | 91 | branch = safe_str(branch) |
|
92 | 92 | |
|
93 | 93 | if branch_rev: |
|
94 | 94 | branch_rev = safe_str(branch_rev) |
|
95 | 95 | # a revset not restricting to merge() would be better |
|
96 | 96 | # (especially because it would get the branch point) |
|
97 | 97 | # ... but is currently too expensive |
|
98 | 98 | # including branches of children could be nice too |
|
99 | 99 | peerbranches = set() |
|
100 | 100 | for i in repo._repo.revs( |
|
101 | 101 | "sort(parents(branch(id(%s)) and merge()) - branch(id(%s)), -rev)", |
|
102 | 102 | branch_rev, branch_rev): |
|
103 | 103 | abranch = repo.get_changeset(i).branch |
|
104 | 104 | if abranch not in peerbranches: |
|
105 | 105 | n = 'branch:%s:%s' % (abranch, repo.get_changeset(abranch).raw_id) |
|
106 | 106 | peers.append((n, abranch)) |
|
107 | 107 | peerbranches.add(abranch) |
|
108 | 108 | |
|
109 | 109 | selected = None |
|
110 | 110 | tiprev = repo.tags.get('tip') |
|
111 | 111 | tipbranch = None |
|
112 | 112 | |
|
113 | 113 | branches = [] |
|
114 | 114 | for abranch, branchrev in repo.branches.iteritems(): |
|
115 | 115 | n = 'branch:%s:%s' % (abranch, branchrev) |
|
116 | 116 | desc = abranch |
|
117 | 117 | if branchrev == tiprev: |
|
118 | 118 | tipbranch = abranch |
|
119 | 119 | desc = '%s (current tip)' % desc |
|
120 | 120 | branches.append((n, desc)) |
|
121 | 121 | if rev == branchrev: |
|
122 | 122 | selected = n |
|
123 | 123 | if branch == abranch: |
|
124 | 124 | if not rev: |
|
125 | 125 | selected = n |
|
126 | 126 | branch = None |
|
127 | 127 | if branch: # branch not in list - it is probably closed |
|
128 | 128 | branchrev = repo.closed_branches.get(branch) |
|
129 | 129 | if branchrev: |
|
130 | 130 | n = 'branch:%s:%s' % (branch, branchrev) |
|
131 | 131 | branches.append((n, _('%s (closed)') % branch)) |
|
132 | 132 | selected = n |
|
133 | 133 | branch = None |
|
134 | 134 | if branch: |
|
135 | 135 | log.debug('branch %r not found in %s', branch, repo) |
|
136 | 136 | |
|
137 | 137 | bookmarks = [] |
|
138 | 138 | for bookmark, bookmarkrev in repo.bookmarks.iteritems(): |
|
139 | 139 | n = 'book:%s:%s' % (bookmark, bookmarkrev) |
|
140 | 140 | bookmarks.append((n, bookmark)) |
|
141 | 141 | if rev == bookmarkrev: |
|
142 | 142 | selected = n |
|
143 | 143 | |
|
144 | 144 | tags = [] |
|
145 | 145 | for tag, tagrev in repo.tags.iteritems(): |
|
146 | 146 | if tag == 'tip': |
|
147 | 147 | continue |
|
148 | 148 | n = 'tag:%s:%s' % (tag, tagrev) |
|
149 | 149 | tags.append((n, tag)) |
|
150 | 150 | if rev == tagrev: |
|
151 | 151 | selected = n |
|
152 | 152 | |
|
153 | 153 | # prio 1: rev was selected as existing entry above |
|
154 | 154 | |
|
155 | 155 | # prio 2: create special entry for rev; rev _must_ be used |
|
156 | 156 | specials = [] |
|
157 | 157 | if rev and selected is None: |
|
158 | 158 | selected = 'rev:%s:%s' % (rev, rev) |
|
159 | 159 | specials = [(selected, '%s: %s' % (_("Changeset"), rev[:12]))] |
|
160 | 160 | |
|
161 | 161 | # prio 3: most recent peer branch |
|
162 | 162 | if peers and not selected: |
|
163 | 163 | selected = peers[0][0] |
|
164 | 164 | |
|
165 | 165 | # prio 4: tip revision |
|
166 | 166 | if not selected: |
|
167 | 167 | if h.is_hg(repo): |
|
168 | 168 | if tipbranch: |
|
169 | 169 | selected = 'branch:%s:%s' % (tipbranch, tiprev) |
|
170 | 170 | else: |
|
171 | 171 | selected = 'tag:null:' + repo.EMPTY_CHANGESET |
|
172 | 172 | tags.append((selected, 'null')) |
|
173 | 173 | else: |
|
174 | 174 | if 'master' in repo.branches: |
|
175 | 175 | selected = 'branch:master:%s' % repo.branches['master'] |
|
176 | 176 | else: |
|
177 | 177 | k, v = repo.branches.items()[0] |
|
178 | 178 | selected = 'branch:%s:%s' % (k, v) |
|
179 | 179 | |
|
180 | 180 | groups = [(specials, _("Special")), |
|
181 | 181 | (peers, _("Peer branches")), |
|
182 | 182 | (bookmarks, _("Bookmarks")), |
|
183 | 183 | (branches, _("Branches")), |
|
184 | 184 | (tags, _("Tags")), |
|
185 | 185 | ] |
|
186 | 186 | return [g for g in groups if g[0]], selected |
|
187 | 187 | |
|
188 | 188 | def _get_is_allowed_change_status(self, pull_request): |
|
189 | 189 | if pull_request.is_closed(): |
|
190 | 190 | return False |
|
191 | 191 | |
|
192 | 192 | owner = self.authuser.user_id == pull_request.user_id |
|
193 | 193 | reviewer = self.authuser.user_id in [x.user_id for x in |
|
194 | 194 | pull_request.reviewers] |
|
195 | 195 | return self.authuser.admin or owner or reviewer |
|
196 | 196 | |
|
197 | 197 | @LoginRequired() |
|
198 | 198 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
199 | 199 | 'repository.admin') |
|
200 | 200 | def show_all(self, repo_name): |
|
201 | 201 | c.from_ = request.GET.get('from_') or '' |
|
202 | 202 | c.closed = request.GET.get('closed') or '' |
|
203 | 203 | c.pull_requests = PullRequestModel().get_all(repo_name, from_=c.from_, closed=c.closed) |
|
204 | 204 | c.repo_name = repo_name |
|
205 | 205 | p = safe_int(request.GET.get('page', 1), 1) |
|
206 | 206 | |
|
207 | 207 | c.pullrequests_pager = Page(c.pull_requests, page=p, items_per_page=10) |
|
208 | 208 | |
|
209 | 209 | if request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
210 | 210 | return render('/pullrequests/pullrequest_data.html') |
|
211 | 211 | |
|
212 | 212 | return render('/pullrequests/pullrequest_show_all.html') |
|
213 | 213 | |
|
214 | 214 | @LoginRequired() |
|
215 | 215 | @NotAnonymous() |
|
216 | 216 | def show_my(self): |
|
217 | 217 | c.closed = request.GET.get('closed') or '' |
|
218 | 218 | |
|
219 | 219 | def _filter(pr): |
|
220 | 220 | s = sorted(pr, key=lambda o: o.created_on, reverse=True) |
|
221 | 221 | if not c.closed: |
|
222 | 222 | s = filter(lambda p: p.status != PullRequest.STATUS_CLOSED, s) |
|
223 | 223 | return s |
|
224 | 224 | |
|
225 | 225 | c.my_pull_requests = _filter(PullRequest.query()\ |
|
226 | 226 | .filter(PullRequest.user_id == |
|
227 | 227 | self.authuser.user_id)\ |
|
228 | 228 | .all()) |
|
229 | 229 | |
|
230 | 230 | c.participate_in_pull_requests = _filter(PullRequest.query()\ |
|
231 | 231 | .join(PullRequestReviewers)\ |
|
232 | 232 | .filter(PullRequestReviewers.user_id == |
|
233 | 233 | self.authuser.user_id)\ |
|
234 | 234 | ) |
|
235 | 235 | |
|
236 | 236 | return render('/pullrequests/pullrequest_show_my.html') |
|
237 | 237 | |
|
238 | 238 | @LoginRequired() |
|
239 | 239 | @NotAnonymous() |
|
240 | 240 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
241 | 241 | 'repository.admin') |
|
242 | 242 | def index(self): |
|
243 | 243 | org_repo = c.db_repo |
|
244 | 244 | org_scm_instance = org_repo.scm_instance |
|
245 | 245 | try: |
|
246 | 246 | org_scm_instance.get_changeset() |
|
247 | 247 | except EmptyRepositoryError as e: |
|
248 | 248 | h.flash(h.literal(_('There are no changesets yet')), |
|
249 | 249 | category='warning') |
|
250 | 250 | redirect(url('summary_home', repo_name=org_repo.repo_name)) |
|
251 | 251 | |
|
252 | 252 | org_rev = request.GET.get('rev_end') |
|
253 | 253 | # rev_start is not directly useful - its parent could however be used |
|
254 | 254 | # as default for other and thus give a simple compare view |
|
255 | 255 | #other_rev = request.POST.get('rev_start') |
|
256 | 256 | branch = request.GET.get('branch') |
|
257 | 257 | |
|
258 | 258 | c.cs_repos = [(org_repo.repo_name, org_repo.repo_name)] |
|
259 | 259 | c.default_cs_repo = org_repo.repo_name |
|
260 | 260 | c.cs_refs, c.default_cs_ref = self._get_repo_refs(org_scm_instance, rev=org_rev, branch=branch) |
|
261 | 261 | |
|
262 | 262 | default_cs_ref_type, default_cs_branch, default_cs_rev = c.default_cs_ref.split(':') |
|
263 | 263 | if default_cs_ref_type != 'branch': |
|
264 | 264 | default_cs_branch = org_repo.get_changeset(default_cs_rev).branch |
|
265 | 265 | |
|
266 | 266 | # add org repo to other so we can open pull request against peer branches on itself |
|
267 | 267 | c.a_repos = [(org_repo.repo_name, '%s (self)' % org_repo.repo_name)] |
|
268 | 268 | |
|
269 | 269 | if org_repo.parent: |
|
270 | 270 | # add parent of this fork also and select it. |
|
271 | 271 | # use the same branch on destination as on source, if available. |
|
272 | 272 | c.a_repos.append((org_repo.parent.repo_name, '%s (parent)' % org_repo.parent.repo_name)) |
|
273 | 273 | c.a_repo = org_repo.parent |
|
274 | 274 | c.a_refs, c.default_a_ref = self._get_repo_refs( |
|
275 | 275 | org_repo.parent.scm_instance, branch=default_cs_branch) |
|
276 | 276 | |
|
277 | 277 | else: |
|
278 | 278 | c.a_repo = org_repo |
|
279 | 279 | c.a_refs, c.default_a_ref = self._get_repo_refs(org_scm_instance) # without rev and branch |
|
280 | 280 | |
|
281 | 281 | # gather forks and add to this list ... even though it is rare to |
|
282 | 282 | # request forks to pull from their parent |
|
283 | 283 | for fork in org_repo.forks: |
|
284 | 284 | c.a_repos.append((fork.repo_name, fork.repo_name)) |
|
285 | 285 | |
|
286 | 286 | return render('/pullrequests/pullrequest.html') |
|
287 | 287 | |
|
288 | 288 | @LoginRequired() |
|
289 | 289 | @NotAnonymous() |
|
290 | 290 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
291 | 291 | 'repository.admin') |
|
292 | 292 | @jsonify |
|
293 | 293 | def repo_info(self, repo_name): |
|
294 | 294 | repo = RepoModel()._get_repo(repo_name) |
|
295 | 295 | refs, selected_ref = self._get_repo_refs(repo.scm_instance) |
|
296 | 296 | return { |
|
297 | 297 | 'description': repo.description.split('\n', 1)[0], |
|
298 | 298 | 'selected_ref': selected_ref, |
|
299 | 299 | 'refs': refs, |
|
300 | 300 | } |
|
301 | 301 | |
|
302 | 302 | @LoginRequired() |
|
303 | 303 | @NotAnonymous() |
|
304 | 304 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
305 | 305 | 'repository.admin') |
|
306 | 306 | def create(self, repo_name): |
|
307 | 307 | repo = RepoModel()._get_repo(repo_name) |
|
308 | 308 | try: |
|
309 | 309 | _form = PullRequestForm(repo.repo_id)().to_python(request.POST) |
|
310 | 310 | except formencode.Invalid as errors: |
|
311 | 311 | log.error(traceback.format_exc()) |
|
312 | 312 | log.error(str(errors)) |
|
313 | 313 | msg = _('Error creating pull request: %s') % errors.msg |
|
314 | 314 | h.flash(msg, 'error') |
|
315 | 315 | raise HTTPBadRequest |
|
316 | 316 | |
|
317 | 317 | # heads up: org and other might seem backward here ... |
|
318 | 318 | org_repo_name = _form['org_repo'] |
|
319 | 319 | org_ref = _form['org_ref'] # will have merge_rev as rev but symbolic name |
|
320 | 320 | org_repo = RepoModel()._get_repo(org_repo_name) |
|
321 | 321 | (org_ref_type, |
|
322 | 322 | org_ref_name, |
|
323 | 323 | org_rev) = org_ref.split(':') |
|
324 | 324 | if org_ref_type == 'rev': |
|
325 | 325 | org_ref_type = 'branch' |
|
326 | 326 | cs = org_repo.scm_instance.get_changeset(org_rev) |
|
327 | 327 | org_ref = '%s:%s:%s' % (org_ref_type, cs.branch, cs.raw_id) |
|
328 | 328 | |
|
329 | 329 | other_repo_name = _form['other_repo'] |
|
330 | 330 | other_ref = _form['other_ref'] # will have symbolic name and head revision |
|
331 | 331 | other_repo = RepoModel()._get_repo(other_repo_name) |
|
332 | 332 | (other_ref_type, |
|
333 | 333 | other_ref_name, |
|
334 | 334 | other_rev) = other_ref.split(':') |
|
335 | 335 | |
|
336 | 336 | cs_ranges, _cs_ranges_not, ancestor_rev = \ |
|
337 | 337 | CompareController._get_changesets(org_repo.scm_instance.alias, |
|
338 | 338 | other_repo.scm_instance, other_rev, # org and other "swapped" |
|
339 | 339 | org_repo.scm_instance, org_rev, |
|
340 | 340 | ) |
|
341 | 341 | if ancestor_rev is None: |
|
342 | 342 | ancestor_rev = org_repo.scm_instance.EMPTY_CHANGESET |
|
343 | 343 | revisions = [cs.raw_id for cs in cs_ranges] |
|
344 | 344 | |
|
345 | 345 | # hack: ancestor_rev is not an other_rev but we want to show the |
|
346 | 346 | # requested destination and have the exact ancestor |
|
347 | 347 | other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev) |
|
348 | 348 | |
|
349 | 349 | reviewers = _form['review_members'] |
|
350 | 350 | |
|
351 | 351 | title = _form['pullrequest_title'] |
|
352 | 352 | if not title: |
|
353 | 353 | if org_repo_name == other_repo_name: |
|
354 | 354 | title = '%s to %s' % (h.short_ref(org_ref_type, org_ref_name), |
|
355 | 355 | h.short_ref(other_ref_type, other_ref_name)) |
|
356 | 356 | else: |
|
357 | 357 | title = '%s#%s to %s#%s' % (org_repo_name, h.short_ref(org_ref_type, org_ref_name), |
|
358 | 358 | other_repo_name, h.short_ref(other_ref_type, other_ref_name)) |
|
359 | 359 | description = _form['pullrequest_desc'].strip() or _('No description') |
|
360 | 360 | try: |
|
361 | 361 | pull_request = PullRequestModel().create( |
|
362 | 362 | self.authuser.user_id, org_repo_name, org_ref, other_repo_name, |
|
363 | 363 | other_ref, revisions, reviewers, title, description |
|
364 | 364 | ) |
|
365 | 365 | Session().commit() |
|
366 | 366 | h.flash(_('Successfully opened new pull request'), |
|
367 | 367 | category='success') |
|
368 | 368 | except UserInvalidException as u: |
|
369 | 369 | h.flash(_('Invalid reviewer "%s" specified') % u, category='error') |
|
370 | 370 | raise HTTPBadRequest() |
|
371 | 371 | except Exception: |
|
372 | 372 | h.flash(_('Error occurred while creating pull request'), |
|
373 | 373 | category='error') |
|
374 | 374 | log.error(traceback.format_exc()) |
|
375 | 375 | return redirect(url('pullrequest_home', repo_name=repo_name)) |
|
376 | 376 | |
|
377 | 377 | return redirect(pull_request.url()) |
|
378 | 378 | |
|
379 | 379 | def create_update(self, old_pull_request, updaterev, title, description, reviewers_ids): |
|
380 | 380 | org_repo = RepoModel()._get_repo(old_pull_request.org_repo.repo_name) |
|
381 | 381 | org_ref_type, org_ref_name, org_rev = old_pull_request.org_ref.split(':') |
|
382 | 382 | new_org_rev = self._get_ref_rev(org_repo, 'rev', updaterev) |
|
383 | 383 | |
|
384 | 384 | other_repo = RepoModel()._get_repo(old_pull_request.other_repo.repo_name) |
|
385 | 385 | other_ref_type, other_ref_name, other_rev = old_pull_request.other_ref.split(':') # other_rev is ancestor |
|
386 | 386 | #assert other_ref_type == 'branch', other_ref_type # TODO: what if not? |
|
387 | 387 | new_other_rev = self._get_ref_rev(other_repo, other_ref_type, other_ref_name) |
|
388 | 388 | |
|
389 | 389 | cs_ranges, _cs_ranges_not, ancestor_rev = CompareController._get_changesets(org_repo.scm_instance.alias, |
|
390 | 390 | other_repo.scm_instance, new_other_rev, # org and other "swapped" |
|
391 | 391 | org_repo.scm_instance, new_org_rev) |
|
392 | 392 | |
|
393 | 393 | old_revisions = set(old_pull_request.revisions) |
|
394 | 394 | revisions = [cs.raw_id for cs in cs_ranges] |
|
395 | 395 | new_revisions = [r for r in revisions if r not in old_revisions] |
|
396 | 396 | lost = old_revisions.difference(revisions) |
|
397 | 397 | |
|
398 | 398 | infos = ['This is an update of %s "%s".' % |
|
399 | 399 | (h.canonical_url('pullrequest_show', repo_name=old_pull_request.other_repo.repo_name, |
|
400 | 400 | pull_request_id=old_pull_request.pull_request_id), |
|
401 | 401 | old_pull_request.title)] |
|
402 | 402 | |
|
403 | 403 | if lost: |
|
404 | 404 | infos.append(_('Missing changesets since the previous pull request:')) |
|
405 | 405 | for r in old_pull_request.revisions: |
|
406 | 406 | if r in lost: |
|
407 | 407 | rev_desc = org_repo.get_changeset(r).message.split('\n')[0] |
|
408 | 408 | infos.append(' %s "%s"' % (h.short_id(r), rev_desc)) |
|
409 | 409 | |
|
410 | 410 | if new_revisions: |
|
411 | 411 | infos.append(_('New changesets on %s %s since the previous pull request:') % (org_ref_type, org_ref_name)) |
|
412 | 412 | for r in reversed(revisions): |
|
413 | 413 | if r in new_revisions: |
|
414 | 414 | rev_desc = org_repo.get_changeset(r).message.split('\n')[0] |
|
415 | 415 | infos.append(' %s %s' % (h.short_id(r), h.shorter(rev_desc, 80))) |
|
416 | 416 | |
|
417 | 417 | if ancestor_rev == other_rev: |
|
418 | 418 | infos.append(_("Ancestor didn't change - show diff since previous version:")) |
|
419 | 419 | infos.append(h.canonical_url('compare_url', |
|
420 | 420 | repo_name=org_repo.repo_name, # other_repo is always same as repo_name |
|
421 | 421 | org_ref_type='rev', org_ref_name=h.short_id(org_rev), # use old org_rev as base |
|
422 | 422 | other_ref_type='rev', other_ref_name=h.short_id(new_org_rev), |
|
423 | 423 | )) # note: linear diff, merge or not doesn't matter |
|
424 | 424 | else: |
|
425 | 425 | infos.append(_('This pull request is based on another %s revision and there is no simple diff.') % other_ref_name) |
|
426 | 426 | else: |
|
427 | 427 | infos.append(_('No changes found on %s %s since previous version.') % (org_ref_type, org_ref_name)) |
|
428 | 428 | # TODO: fail? |
|
429 | 429 | |
|
430 | 430 | # hack: ancestor_rev is not an other_ref but we want to show the |
|
431 | 431 | # requested destination and have the exact ancestor |
|
432 | 432 | new_other_ref = '%s:%s:%s' % (other_ref_type, other_ref_name, ancestor_rev) |
|
433 | 433 | new_org_ref = '%s:%s:%s' % (org_ref_type, org_ref_name, new_org_rev) |
|
434 | 434 | |
|
435 | 435 | try: |
|
436 | 436 | title, old_v = re.match(r'(.*)\(v(\d+)\)\s*$', title).groups() |
|
437 | 437 | v = int(old_v) + 1 |
|
438 | 438 | except (AttributeError, ValueError): |
|
439 | 439 | v = 2 |
|
440 | 440 | title = '%s (v%s)' % (title.strip(), v) |
|
441 | 441 | |
|
442 | 442 | # using a mail-like separator, insert new update info at the top of the list |
|
443 | 443 | descriptions = description.replace('\r\n', '\n').split('\n-- \n', 1) |
|
444 | 444 | description = descriptions[0].strip() + '\n\n-- \n' + '\n'.join(infos) |
|
445 | 445 | if len(descriptions) > 1: |
|
446 | 446 | description += '\n\n' + descriptions[1].strip() |
|
447 | 447 | |
|
448 | 448 | try: |
|
449 | 449 | pull_request = PullRequestModel().create( |
|
450 | 450 | self.authuser.user_id, |
|
451 | 451 | old_pull_request.org_repo.repo_name, new_org_ref, |
|
452 | 452 | old_pull_request.other_repo.repo_name, new_other_ref, |
|
453 | 453 | revisions, reviewers_ids, title, description |
|
454 | 454 | ) |
|
455 | 455 | except UserInvalidException as u: |
|
456 | 456 | h.flash(_('Invalid reviewer "%s" specified') % u, category='error') |
|
457 | 457 | raise HTTPBadRequest() |
|
458 | 458 | except Exception: |
|
459 | 459 | h.flash(_('Error occurred while creating pull request'), |
|
460 | 460 | category='error') |
|
461 | 461 | log.error(traceback.format_exc()) |
|
462 | 462 | return redirect(old_pull_request.url()) |
|
463 | 463 | |
|
464 | 464 | ChangesetCommentsModel().create( |
|
465 | 465 | text=_('Closed, replaced by %s .') % pull_request.url(canonical=True), |
|
466 | 466 | repo=old_pull_request.other_repo.repo_id, |
|
467 | 467 | user=c.authuser.user_id, |
|
468 | 468 | pull_request=old_pull_request.pull_request_id, |
|
469 | 469 | closing_pr=True) |
|
470 | 470 | PullRequestModel().close_pull_request(old_pull_request.pull_request_id) |
|
471 | 471 | |
|
472 | 472 | Session().commit() |
|
473 | 473 | h.flash(_('Pull request update created'), |
|
474 | 474 | category='success') |
|
475 | 475 | |
|
476 | 476 | return redirect(pull_request.url()) |
|
477 | 477 | |
|
478 | 478 | # pullrequest_post for PR editing |
|
479 | 479 | @LoginRequired() |
|
480 | 480 | @NotAnonymous() |
|
481 | 481 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
482 | 482 | 'repository.admin') |
|
483 | 483 | def post(self, repo_name, pull_request_id): |
|
484 | 484 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
485 | 485 | if pull_request.is_closed(): |
|
486 | 486 | raise HTTPForbidden() |
|
487 | 487 | assert pull_request.other_repo.repo_name == repo_name |
|
488 | 488 | #only owner or admin can update it |
|
489 | 489 | owner = pull_request.author.user_id == c.authuser.user_id |
|
490 | 490 | repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name) |
|
491 | 491 | if not (h.HasPermissionAny('hg.admin') or repo_admin or owner): |
|
492 | 492 | raise HTTPForbidden() |
|
493 | 493 | |
|
494 | 494 | _form = PullRequestPostForm()().to_python(request.POST) |
|
495 | 495 | reviewers_ids = [int(s) for s in _form['review_members']] |
|
496 | 496 | |
|
497 | 497 | if _form['updaterev']: |
|
498 | 498 | return self.create_update(pull_request, |
|
499 | 499 | _form['updaterev'], |
|
500 | 500 | _form['pullrequest_title'], |
|
501 | 501 | _form['pullrequest_desc'], |
|
502 | 502 | reviewers_ids) |
|
503 | 503 | |
|
504 | 504 | old_description = pull_request.description |
|
505 | 505 | pull_request.title = _form['pullrequest_title'] |
|
506 | 506 | pull_request.description = _form['pullrequest_desc'].strip() or _('No description') |
|
507 | 507 | try: |
|
508 | 508 | PullRequestModel().mention_from_description(pull_request, old_description) |
|
509 | 509 | PullRequestModel().update_reviewers(pull_request_id, reviewers_ids) |
|
510 | 510 | except UserInvalidException as u: |
|
511 | 511 | h.flash(_('Invalid reviewer "%s" specified') % u, category='error') |
|
512 | 512 | raise HTTPBadRequest() |
|
513 | 513 | |
|
514 | 514 | Session().commit() |
|
515 | 515 | h.flash(_('Pull request updated'), category='success') |
|
516 | 516 | |
|
517 | 517 | return redirect(pull_request.url()) |
|
518 | 518 | |
|
519 | 519 | @LoginRequired() |
|
520 | 520 | @NotAnonymous() |
|
521 | 521 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
522 | 522 | 'repository.admin') |
|
523 | 523 | @jsonify |
|
524 | 524 | def delete(self, repo_name, pull_request_id): |
|
525 | 525 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
526 | 526 | #only owner can delete it ! |
|
527 | 527 | if pull_request.author.user_id == c.authuser.user_id: |
|
528 | 528 | PullRequestModel().delete(pull_request) |
|
529 | 529 | Session().commit() |
|
530 | 530 | h.flash(_('Successfully deleted pull request'), |
|
531 | 531 | category='success') |
|
532 | 532 | return redirect(url('my_pullrequests')) |
|
533 | 533 | raise HTTPForbidden() |
|
534 | 534 | |
|
535 | 535 | @LoginRequired() |
|
536 | 536 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
537 | 537 | 'repository.admin') |
|
538 | 538 | def show(self, repo_name, pull_request_id, extra=None): |
|
539 | 539 | repo_model = RepoModel() |
|
540 | 540 | c.users_array = repo_model.get_users_js() |
|
541 | 541 | c.user_groups_array = repo_model.get_user_groups_js() |
|
542 | 542 | c.pull_request = PullRequest.get_or_404(pull_request_id) |
|
543 | 543 | c.allowed_to_change_status = self._get_is_allowed_change_status(c.pull_request) |
|
544 | 544 | cc_model = ChangesetCommentsModel() |
|
545 | 545 | cs_model = ChangesetStatusModel() |
|
546 | 546 | |
|
547 | 547 | # pull_requests repo_name we opened it against |
|
548 | 548 | # ie. other_repo must match |
|
549 | 549 | if repo_name != c.pull_request.other_repo.repo_name: |
|
550 | 550 | raise HTTPNotFound |
|
551 | 551 | |
|
552 | 552 | # load compare data into template context |
|
553 | 553 | c.cs_repo = c.pull_request.org_repo |
|
554 | 554 | (c.cs_ref_type, |
|
555 | 555 | c.cs_ref_name, |
|
556 | 556 | c.cs_rev) = c.pull_request.org_ref.split(':') |
|
557 | 557 | |
|
558 | 558 | c.a_repo = c.pull_request.other_repo |
|
559 | 559 | (c.a_ref_type, |
|
560 | 560 | c.a_ref_name, |
|
561 | 561 | c.a_rev) = c.pull_request.other_ref.split(':') # other_rev is ancestor |
|
562 | 562 | |
|
563 | 563 | org_scm_instance = c.cs_repo.scm_instance # property with expensive cache invalidation check!!! |
|
564 | 564 | c.cs_repo = c.cs_repo |
|
565 | 565 | c.cs_ranges = [org_scm_instance.get_changeset(x) for x in c.pull_request.revisions] |
|
566 | 566 | c.cs_ranges_org = None # not stored and not important and moving target - could be calculated ... |
|
567 | 567 | revs = [ctx.revision for ctx in reversed(c.cs_ranges)] |
|
568 | 568 | c.jsdata = json.dumps(graph_data(org_scm_instance, revs)) |
|
569 | 569 | |
|
570 | 570 | avail_revs = set() |
|
571 | 571 | avail_show = [] |
|
572 | 572 | c.cs_branch_name = c.cs_ref_name |
|
573 | 573 | other_scm_instance = c.a_repo.scm_instance |
|
574 | 574 | c.update_msg = "" |
|
575 | 575 | c.update_msg_other = "" |
|
576 | 576 | if org_scm_instance.alias == 'hg' and c.a_ref_name != 'ancestor': |
|
577 | 577 | if c.cs_ref_type != 'branch': |
|
578 | 578 | c.cs_branch_name = org_scm_instance.get_changeset(c.cs_ref_name).branch # use ref_type ? |
|
579 | 579 | c.a_branch_name = c.a_ref_name |
|
580 | 580 | if c.a_ref_type != 'branch': |
|
581 | 581 | try: |
|
582 | 582 | c.a_branch_name = other_scm_instance.get_changeset(c.a_ref_name).branch # use ref_type ? |
|
583 | 583 | except EmptyRepositoryError: |
|
584 | 584 | c.a_branch_name = 'null' # not a branch name ... but close enough |
|
585 | 585 | # candidates: descendants of old head that are on the right branch |
|
586 | 586 | # and not are the old head itself ... |
|
587 | 587 | # and nothing at all if old head is a descendant of target ref name |
|
588 | 588 | if other_scm_instance._repo.revs('present(%s)::&%s', c.cs_ranges[-1].raw_id, c.a_branch_name): |
|
589 | 589 | c.update_msg = _('This pull request has already been merged to %s.') % c.a_branch_name |
|
590 | 590 | elif c.pull_request.is_closed(): |
|
591 | 591 | c.update_msg = _('This pull request has been closed and can not be updated.') |
|
592 | 592 | else: # look for descendants of PR head on source branch in org repo |
|
593 | 593 | avail_revs = org_scm_instance._repo.revs('%s:: & branch(%s)', |
|
594 | 594 | revs[0], c.cs_branch_name) |
|
595 | 595 | if len(avail_revs) > 1: # more than just revs[0] |
|
596 | 596 | # also show changesets that not are descendants but would be merged in |
|
597 | 597 | targethead = other_scm_instance.get_changeset(c.a_branch_name).raw_id |
|
598 | 598 | if org_scm_instance.path != other_scm_instance.path: |
|
599 | 599 | # Note: org_scm_instance.path must come first so all |
|
600 | 600 | # valid revision numbers are 100% org_scm compatible |
|
601 | 601 | # - both for avail_revs and for revset results |
|
602 | 602 | hgrepo = unionrepo.unionrepository(org_scm_instance.baseui, |
|
603 | 603 | org_scm_instance.path, |
|
604 | 604 | other_scm_instance.path) |
|
605 | 605 | else: |
|
606 | 606 | hgrepo = org_scm_instance._repo |
|
607 | 607 | show = set(hgrepo.revs('::%ld & !::%s & !::%s', |
|
608 | 608 | avail_revs, revs[0], targethead)) |
|
609 | 609 | c.update_msg = _('This pull request can be updated with changes on %s:') % c.cs_branch_name |
|
610 | 610 | else: |
|
611 | 611 | show = set() |
|
612 | 612 | c.update_msg = _('No changesets found for updating this pull request.') |
|
613 | 613 | |
|
614 | 614 | # TODO: handle branch heads that not are tip-most |
|
615 | 615 | brevs = org_scm_instance._repo.revs('%s - %ld', c.cs_branch_name, avail_revs) |
|
616 | 616 | if brevs: |
|
617 | 617 | # also show changesets that are on branch but neither ancestors nor descendants |
|
618 | 618 | show.update(org_scm_instance._repo.revs('::%ld - ::%ld - ::%s', brevs, avail_revs, c.a_branch_name)) |
|
619 | 619 | show.add(revs[0]) # make sure graph shows this so we can see how they relate |
|
620 | 620 | c.update_msg_other = _('Note: Branch %s has another head: %s.') % (c.cs_branch_name, |
|
621 | 621 | h.short_id(org_scm_instance.get_changeset((max(brevs))).raw_id)) |
|
622 | 622 | |
|
623 | 623 | avail_show = sorted(show, reverse=True) |
|
624 | 624 | |
|
625 | 625 | elif org_scm_instance.alias == 'git': |
|
626 | 626 | c.update_msg = _("Git pull requests don't support updates yet.") |
|
627 | 627 | |
|
628 | 628 | c.avail_revs = avail_revs |
|
629 | 629 | c.avail_cs = [org_scm_instance.get_changeset(r) for r in avail_show] |
|
630 | 630 | c.avail_jsdata = json.dumps(graph_data(org_scm_instance, avail_show)) |
|
631 | 631 | |
|
632 | 632 | raw_ids = [x.raw_id for x in c.cs_ranges] |
|
633 | 633 | c.cs_comments = c.cs_repo.get_comments(raw_ids) |
|
634 | 634 | c.statuses = c.cs_repo.statuses(raw_ids) |
|
635 | 635 | |
|
636 | 636 | ignore_whitespace = request.GET.get('ignorews') == '1' |
|
637 | 637 | line_context = request.GET.get('context', 3) |
|
638 | 638 | c.ignorews_url = _ignorews_url |
|
639 | 639 | c.context_url = _context_url |
|
640 | 640 | c.fulldiff = request.GET.get('fulldiff') |
|
641 | 641 | diff_limit = self.cut_off_limit if not c.fulldiff else None |
|
642 | 642 | |
|
643 | 643 | # we swap org/other ref since we run a simple diff on one repo |
|
644 | log.debug('running diff between %s and %s in %s' | |
|
645 |
|
|
|
644 | log.debug('running diff between %s and %s in %s', | |
|
645 | c.a_rev, c.cs_rev, org_scm_instance.path) | |
|
646 | 646 | txtdiff = org_scm_instance.get_diff(rev1=safe_str(c.a_rev), rev2=safe_str(c.cs_rev), |
|
647 | 647 | ignore_whitespace=ignore_whitespace, |
|
648 | 648 | context=line_context) |
|
649 | 649 | |
|
650 | 650 | diff_processor = diffs.DiffProcessor(txtdiff or '', format='gitdiff', |
|
651 | 651 | diff_limit=diff_limit) |
|
652 | 652 | _parsed = diff_processor.prepare() |
|
653 | 653 | |
|
654 | 654 | c.limited_diff = False |
|
655 | 655 | if isinstance(_parsed, LimitedDiffContainer): |
|
656 | 656 | c.limited_diff = True |
|
657 | 657 | |
|
658 | 658 | c.files = [] |
|
659 | 659 | c.changes = {} |
|
660 | 660 | c.lines_added = 0 |
|
661 | 661 | c.lines_deleted = 0 |
|
662 | 662 | |
|
663 | 663 | for f in _parsed: |
|
664 | 664 | st = f['stats'] |
|
665 | 665 | c.lines_added += st['added'] |
|
666 | 666 | c.lines_deleted += st['deleted'] |
|
667 | 667 | fid = h.FID('', f['filename']) |
|
668 | 668 | c.files.append([fid, f['operation'], f['filename'], f['stats']]) |
|
669 | 669 | htmldiff = diff_processor.as_html(enable_comments=True, |
|
670 | 670 | parsed_lines=[f]) |
|
671 | 671 | c.changes[fid] = [f['operation'], f['filename'], htmldiff] |
|
672 | 672 | |
|
673 | 673 | # inline comments |
|
674 | 674 | c.inline_cnt = 0 |
|
675 | 675 | c.inline_comments = cc_model.get_inline_comments( |
|
676 | 676 | c.db_repo.repo_id, |
|
677 | 677 | pull_request=pull_request_id) |
|
678 | 678 | # count inline comments |
|
679 | 679 | for __, lines in c.inline_comments: |
|
680 | 680 | for comments in lines.values(): |
|
681 | 681 | c.inline_cnt += len(comments) |
|
682 | 682 | # comments |
|
683 | 683 | c.comments = cc_model.get_comments(c.db_repo.repo_id, |
|
684 | 684 | pull_request=pull_request_id) |
|
685 | 685 | |
|
686 | 686 | # (badly named) pull-request status calculation based on reviewer votes |
|
687 | 687 | (c.pull_request_reviewers, |
|
688 | 688 | c.pull_request_pending_reviewers, |
|
689 | 689 | c.current_voting_result, |
|
690 | 690 | ) = cs_model.calculate_pull_request_result(c.pull_request) |
|
691 | 691 | c.changeset_statuses = ChangesetStatus.STATUSES |
|
692 | 692 | |
|
693 | 693 | c.as_form = False |
|
694 | 694 | c.ancestor = None # there is one - but right here we don't know which |
|
695 | 695 | return render('/pullrequests/pullrequest_show.html') |
|
696 | 696 | |
|
697 | 697 | @LoginRequired() |
|
698 | 698 | @NotAnonymous() |
|
699 | 699 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
700 | 700 | 'repository.admin') |
|
701 | 701 | @jsonify |
|
702 | 702 | def comment(self, repo_name, pull_request_id): |
|
703 | 703 | pull_request = PullRequest.get_or_404(pull_request_id) |
|
704 | 704 | |
|
705 | 705 | status = 0 |
|
706 | 706 | close_pr = False |
|
707 | 707 | allowed_to_change_status = self._get_is_allowed_change_status(pull_request) |
|
708 | 708 | if allowed_to_change_status: |
|
709 | 709 | status = request.POST.get('changeset_status') |
|
710 | 710 | close_pr = request.POST.get('save_close') |
|
711 | 711 | text = request.POST.get('text', '').strip() |
|
712 | 712 | if close_pr: |
|
713 | 713 | text = _('Closing.') + '\n' + text |
|
714 | 714 | |
|
715 | 715 | comment = ChangesetCommentsModel().create( |
|
716 | 716 | text=text, |
|
717 | 717 | repo=c.db_repo.repo_id, |
|
718 | 718 | user=c.authuser.user_id, |
|
719 | 719 | pull_request=pull_request_id, |
|
720 | 720 | f_path=request.POST.get('f_path'), |
|
721 | 721 | line_no=request.POST.get('line'), |
|
722 | 722 | status_change=(ChangesetStatus.get_status_lbl(status) |
|
723 | 723 | if status and allowed_to_change_status else None), |
|
724 | 724 | closing_pr=close_pr |
|
725 | 725 | ) |
|
726 | 726 | |
|
727 | 727 | action_logger(self.authuser, |
|
728 | 728 | 'user_commented_pull_request:%s' % pull_request_id, |
|
729 | 729 | c.db_repo, self.ip_addr, self.sa) |
|
730 | 730 | |
|
731 | 731 | if allowed_to_change_status: |
|
732 | 732 | # get status if set ! |
|
733 | 733 | if status: |
|
734 | 734 | ChangesetStatusModel().set_status( |
|
735 | 735 | c.db_repo.repo_id, |
|
736 | 736 | status, |
|
737 | 737 | c.authuser.user_id, |
|
738 | 738 | comment, |
|
739 | 739 | pull_request=pull_request_id |
|
740 | 740 | ) |
|
741 | 741 | |
|
742 | 742 | if close_pr: |
|
743 | 743 | PullRequestModel().close_pull_request(pull_request_id) |
|
744 | 744 | action_logger(self.authuser, |
|
745 | 745 | 'user_closed_pull_request:%s' % pull_request_id, |
|
746 | 746 | c.db_repo, self.ip_addr, self.sa) |
|
747 | 747 | |
|
748 | 748 | Session().commit() |
|
749 | 749 | |
|
750 | 750 | if not request.environ.get('HTTP_X_PARTIAL_XHR'): |
|
751 | 751 | return redirect(pull_request.url()) |
|
752 | 752 | |
|
753 | 753 | data = { |
|
754 | 754 | 'target_id': h.safeid(h.safe_unicode(request.POST.get('f_path'))), |
|
755 | 755 | } |
|
756 | 756 | if comment is not None: |
|
757 | 757 | c.comment = comment |
|
758 | 758 | data.update(comment.get_dict()) |
|
759 | 759 | data.update({'rendered_text': |
|
760 | 760 | render('changeset/changeset_comment_block.html')}) |
|
761 | 761 | |
|
762 | 762 | return data |
|
763 | 763 | |
|
764 | 764 | @LoginRequired() |
|
765 | 765 | @NotAnonymous() |
|
766 | 766 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
767 | 767 | 'repository.admin') |
|
768 | 768 | @jsonify |
|
769 | 769 | def delete_comment(self, repo_name, comment_id): |
|
770 | 770 | co = ChangesetComment.get(comment_id) |
|
771 | 771 | if co.pull_request.is_closed(): |
|
772 | 772 | #don't allow deleting comments on closed pull request |
|
773 | 773 | raise HTTPForbidden() |
|
774 | 774 | |
|
775 | 775 | owner = co.author.user_id == c.authuser.user_id |
|
776 | 776 | repo_admin = h.HasRepoPermissionAny('repository.admin')(c.repo_name) |
|
777 | 777 | if h.HasPermissionAny('hg.admin') or repo_admin or owner: |
|
778 | 778 | ChangesetCommentsModel().delete(comment=co) |
|
779 | 779 | Session().commit() |
|
780 | 780 | return True |
|
781 | 781 | else: |
|
782 | 782 | raise HTTPForbidden() |
@@ -1,147 +1,147 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.search |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Search controller for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Aug 7, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import logging |
|
29 | 29 | import traceback |
|
30 | 30 | import urllib |
|
31 | 31 | from pylons.i18n.translation import _ |
|
32 | 32 | from pylons import request, config, tmpl_context as c |
|
33 | 33 | |
|
34 | 34 | from whoosh.index import open_dir, EmptyIndexError |
|
35 | 35 | from whoosh.qparser import QueryParser, QueryParserError |
|
36 | 36 | from whoosh.query import Phrase, Prefix |
|
37 | 37 | from webhelpers.util import update_params |
|
38 | 38 | |
|
39 | 39 | from kallithea.lib.auth import LoginRequired |
|
40 | 40 | from kallithea.lib.base import BaseRepoController, render |
|
41 | 41 | from kallithea.lib.indexers import CHGSETS_SCHEMA, SCHEMA, CHGSET_IDX_NAME, \ |
|
42 | 42 | IDX_NAME, WhooshResultWrapper |
|
43 | 43 | from kallithea.model.repo import RepoModel |
|
44 | 44 | from kallithea.lib.utils2 import safe_str, safe_int |
|
45 | 45 | from kallithea.lib.helpers import Page |
|
46 | 46 | |
|
47 | 47 | log = logging.getLogger(__name__) |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | class SearchController(BaseRepoController): |
|
51 | 51 | |
|
52 | 52 | def __before__(self): |
|
53 | 53 | super(SearchController, self).__before__() |
|
54 | 54 | |
|
55 | 55 | @LoginRequired() |
|
56 | 56 | def index(self, repo_name=None): |
|
57 | 57 | c.repo_name = repo_name |
|
58 | 58 | c.formated_results = [] |
|
59 | 59 | c.runtime = '' |
|
60 | 60 | c.cur_query = request.GET.get('q', None) |
|
61 | 61 | c.cur_type = request.GET.get('type', 'content') |
|
62 | 62 | c.cur_search = search_type = {'content': 'content', |
|
63 | 63 | 'commit': 'message', |
|
64 | 64 | 'path': 'path', |
|
65 | 65 | 'repository': 'repository' |
|
66 | 66 | }.get(c.cur_type, 'content') |
|
67 | 67 | |
|
68 | 68 | index_name = { |
|
69 | 69 | 'content': IDX_NAME, |
|
70 | 70 | 'commit': CHGSET_IDX_NAME, |
|
71 | 71 | 'path': IDX_NAME |
|
72 | 72 | }.get(c.cur_type, IDX_NAME) |
|
73 | 73 | |
|
74 | 74 | schema_defn = { |
|
75 | 75 | 'content': SCHEMA, |
|
76 | 76 | 'commit': CHGSETS_SCHEMA, |
|
77 | 77 | 'path': SCHEMA |
|
78 | 78 | }.get(c.cur_type, SCHEMA) |
|
79 | 79 | |
|
80 |
log.debug('IDX: %s' |
|
|
81 |
log.debug('SCHEMA: %s' |
|
|
80 | log.debug('IDX: %s', index_name) | |
|
81 | log.debug('SCHEMA: %s', schema_defn) | |
|
82 | 82 | |
|
83 | 83 | if c.cur_query: |
|
84 | 84 | cur_query = c.cur_query.lower() |
|
85 | 85 | log.debug(cur_query) |
|
86 | 86 | |
|
87 | 87 | if c.cur_query: |
|
88 | 88 | p = safe_int(request.GET.get('page', 1), 1) |
|
89 | 89 | highlight_items = set() |
|
90 | 90 | try: |
|
91 | 91 | idx = open_dir(config['app_conf']['index_dir'], |
|
92 | 92 | indexname=index_name) |
|
93 | 93 | searcher = idx.searcher() |
|
94 | 94 | |
|
95 | 95 | qp = QueryParser(search_type, schema=schema_defn) |
|
96 | 96 | if c.repo_name: |
|
97 | 97 | cur_query = u'repository:%s %s' % (c.repo_name, cur_query) |
|
98 | 98 | try: |
|
99 | 99 | query = qp.parse(unicode(cur_query)) |
|
100 | 100 | # extract words for highlight |
|
101 | 101 | if isinstance(query, Phrase): |
|
102 | 102 | highlight_items.update(query.words) |
|
103 | 103 | elif isinstance(query, Prefix): |
|
104 | 104 | highlight_items.add(query.text) |
|
105 | 105 | else: |
|
106 | 106 | for i in query.all_terms(): |
|
107 | 107 | if i[0] in ['content', 'message']: |
|
108 | 108 | highlight_items.add(i[1]) |
|
109 | 109 | |
|
110 | 110 | matcher = query.matcher(searcher) |
|
111 | 111 | |
|
112 |
log.debug('query: %s' |
|
|
113 |
log.debug('hl terms: %s' |
|
|
112 | log.debug('query: %s', query) | |
|
113 | log.debug('hl terms: %s', highlight_items) | |
|
114 | 114 | results = searcher.search(query) |
|
115 | 115 | res_ln = len(results) |
|
116 | 116 | c.runtime = '%s results (%.3f seconds)' % ( |
|
117 | 117 | res_ln, results.runtime |
|
118 | 118 | ) |
|
119 | 119 | |
|
120 | 120 | def url_generator(**kw): |
|
121 | 121 | q = urllib.quote(safe_str(c.cur_query)) |
|
122 | 122 | return update_params("?q=%s&type=%s" \ |
|
123 | 123 | % (q, safe_str(c.cur_type)), **kw) |
|
124 | 124 | repo_location = RepoModel().repos_path |
|
125 | 125 | c.formated_results = Page( |
|
126 | 126 | WhooshResultWrapper(search_type, searcher, matcher, |
|
127 | 127 | highlight_items, repo_location), |
|
128 | 128 | page=p, |
|
129 | 129 | item_count=res_ln, |
|
130 | 130 | items_per_page=10, |
|
131 | 131 | url=url_generator |
|
132 | 132 | ) |
|
133 | 133 | |
|
134 | 134 | except QueryParserError: |
|
135 | 135 | c.runtime = _('Invalid search query. Try quoting it.') |
|
136 | 136 | searcher.close() |
|
137 | 137 | except (EmptyIndexError, IOError): |
|
138 | 138 | log.error(traceback.format_exc()) |
|
139 | 139 | log.error('Empty Index data') |
|
140 | 140 | c.runtime = _('There is no index to search in. ' |
|
141 | 141 | 'Please run whoosh indexer') |
|
142 | 142 | except (Exception): |
|
143 | 143 | log.error(traceback.format_exc()) |
|
144 | 144 | c.runtime = _('An error occurred during search operation.') |
|
145 | 145 | |
|
146 | 146 | # Return a rendered template |
|
147 | 147 | return render('/search/search.html') |
@@ -1,228 +1,228 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.controllers.summary |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Summary controller for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 18, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import traceback |
|
29 | 29 | import calendar |
|
30 | 30 | import logging |
|
31 | 31 | from time import mktime |
|
32 | 32 | from datetime import timedelta, date |
|
33 | 33 | |
|
34 | 34 | from pylons import tmpl_context as c, request |
|
35 | 35 | from pylons.i18n.translation import _ |
|
36 | 36 | from webob.exc import HTTPBadRequest |
|
37 | 37 | |
|
38 | 38 | from beaker.cache import cache_region, region_invalidate |
|
39 | 39 | |
|
40 | 40 | from kallithea.lib.compat import product |
|
41 | 41 | from kallithea.lib.vcs.exceptions import ChangesetError, EmptyRepositoryError, \ |
|
42 | 42 | NodeDoesNotExistError |
|
43 | 43 | from kallithea.config.conf import ALL_READMES, ALL_EXTS, LANGUAGES_EXTENSIONS_MAP |
|
44 | 44 | from kallithea.model.db import Statistics, CacheInvalidation, User |
|
45 | 45 | from kallithea.lib.utils import jsonify |
|
46 | 46 | from kallithea.lib.utils2 import safe_str |
|
47 | 47 | from kallithea.lib.auth import LoginRequired, HasRepoPermissionAnyDecorator,\ |
|
48 | 48 | NotAnonymous |
|
49 | 49 | from kallithea.lib.base import BaseRepoController, render |
|
50 | 50 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
51 | 51 | from kallithea.lib.markup_renderer import MarkupRenderer |
|
52 | 52 | from kallithea.lib.celerylib import run_task |
|
53 | 53 | from kallithea.lib.celerylib.tasks import get_commits_stats |
|
54 | 54 | from kallithea.lib.compat import json |
|
55 | 55 | from kallithea.lib.vcs.nodes import FileNode |
|
56 | 56 | from kallithea.controllers.changelog import _load_changelog_summary |
|
57 | 57 | |
|
58 | 58 | log = logging.getLogger(__name__) |
|
59 | 59 | |
|
60 | 60 | README_FILES = [''.join([x[0][0], x[1][0]]) for x in |
|
61 | 61 | sorted(list(product(ALL_READMES, ALL_EXTS)), |
|
62 | 62 | key=lambda y:y[0][1] + y[1][1])] |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | class SummaryController(BaseRepoController): |
|
66 | 66 | |
|
67 | 67 | def __before__(self): |
|
68 | 68 | super(SummaryController, self).__before__() |
|
69 | 69 | |
|
70 | 70 | def __get_readme_data(self, db_repo): |
|
71 | 71 | repo_name = db_repo.repo_name |
|
72 | 72 | log.debug('Looking for README file') |
|
73 | 73 | |
|
74 | 74 | @cache_region('long_term') |
|
75 | 75 | def _get_readme_from_cache(key, kind): |
|
76 | 76 | readme_data = None |
|
77 | 77 | readme_file = None |
|
78 | 78 | try: |
|
79 | 79 | # gets the landing revision! or tip if fails |
|
80 | 80 | cs = db_repo.get_landing_changeset() |
|
81 | 81 | if isinstance(cs, EmptyChangeset): |
|
82 | 82 | raise EmptyRepositoryError() |
|
83 | 83 | renderer = MarkupRenderer() |
|
84 | 84 | for f in README_FILES: |
|
85 | 85 | try: |
|
86 | 86 | readme = cs.get_node(f) |
|
87 | 87 | if not isinstance(readme, FileNode): |
|
88 | 88 | continue |
|
89 | 89 | readme_file = f |
|
90 |
log.debug('Found README file `%s` rendering...' |
|
|
90 | log.debug('Found README file `%s` rendering...', | |
|
91 | 91 | readme_file) |
|
92 | 92 | readme_data = renderer.render(readme.content, |
|
93 | 93 | filename=f) |
|
94 | 94 | break |
|
95 | 95 | except NodeDoesNotExistError: |
|
96 | 96 | continue |
|
97 | 97 | except ChangesetError: |
|
98 | 98 | log.error(traceback.format_exc()) |
|
99 | 99 | pass |
|
100 | 100 | except EmptyRepositoryError: |
|
101 | 101 | pass |
|
102 | 102 | |
|
103 | 103 | return readme_data, readme_file |
|
104 | 104 | |
|
105 | 105 | kind = 'README' |
|
106 | 106 | valid = CacheInvalidation.test_and_set_valid(repo_name, kind) |
|
107 | 107 | if not valid: |
|
108 | 108 | region_invalidate(_get_readme_from_cache, None, repo_name, kind) |
|
109 | 109 | return _get_readme_from_cache(repo_name, kind) |
|
110 | 110 | |
|
111 | 111 | @LoginRequired() |
|
112 | 112 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
113 | 113 | 'repository.admin') |
|
114 | 114 | def index(self, repo_name): |
|
115 | 115 | _load_changelog_summary() |
|
116 | 116 | |
|
117 | 117 | username = '' |
|
118 | 118 | if self.authuser.username != User.DEFAULT_USER: |
|
119 | 119 | username = safe_str(self.authuser.username) |
|
120 | 120 | |
|
121 | 121 | _def_clone_uri = _def_clone_uri_by_id = c.clone_uri_tmpl |
|
122 | 122 | if '{repo}' in _def_clone_uri: |
|
123 | 123 | _def_clone_uri_by_id = _def_clone_uri.replace('{repo}', '_{repoid}') |
|
124 | 124 | elif '{repoid}' in _def_clone_uri: |
|
125 | 125 | _def_clone_uri_by_id = _def_clone_uri.replace('_{repoid}', '{repo}') |
|
126 | 126 | |
|
127 | 127 | c.clone_repo_url = c.db_repo.clone_url(user=username, |
|
128 | 128 | uri_tmpl=_def_clone_uri) |
|
129 | 129 | c.clone_repo_url_id = c.db_repo.clone_url(user=username, |
|
130 | 130 | uri_tmpl=_def_clone_uri_by_id) |
|
131 | 131 | |
|
132 | 132 | if c.db_repo.enable_statistics: |
|
133 | 133 | c.show_stats = True |
|
134 | 134 | else: |
|
135 | 135 | c.show_stats = False |
|
136 | 136 | |
|
137 | 137 | stats = self.sa.query(Statistics)\ |
|
138 | 138 | .filter(Statistics.repository == c.db_repo)\ |
|
139 | 139 | .scalar() |
|
140 | 140 | |
|
141 | 141 | c.stats_percentage = 0 |
|
142 | 142 | |
|
143 | 143 | if stats and stats.languages: |
|
144 | 144 | c.no_data = False is c.db_repo.enable_statistics |
|
145 | 145 | lang_stats_d = json.loads(stats.languages) |
|
146 | 146 | |
|
147 | 147 | lang_stats = ((x, {"count": y, |
|
148 | 148 | "desc": LANGUAGES_EXTENSIONS_MAP.get(x)}) |
|
149 | 149 | for x, y in lang_stats_d.items()) |
|
150 | 150 | |
|
151 | 151 | c.trending_languages = json.dumps( |
|
152 | 152 | sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10] |
|
153 | 153 | ) |
|
154 | 154 | else: |
|
155 | 155 | c.no_data = True |
|
156 | 156 | c.trending_languages = json.dumps([]) |
|
157 | 157 | |
|
158 | 158 | c.enable_downloads = c.db_repo.enable_downloads |
|
159 | 159 | c.readme_data, c.readme_file = \ |
|
160 | 160 | self.__get_readme_data(c.db_repo) |
|
161 | 161 | return render('summary/summary.html') |
|
162 | 162 | |
|
163 | 163 | @LoginRequired() |
|
164 | 164 | @NotAnonymous() |
|
165 | 165 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
166 | 166 | 'repository.admin') |
|
167 | 167 | @jsonify |
|
168 | 168 | def repo_size(self, repo_name): |
|
169 | 169 | if request.is_xhr: |
|
170 | 170 | return c.db_repo._repo_size() |
|
171 | 171 | else: |
|
172 | 172 | raise HTTPBadRequest() |
|
173 | 173 | |
|
174 | 174 | @LoginRequired() |
|
175 | 175 | @HasRepoPermissionAnyDecorator('repository.read', 'repository.write', |
|
176 | 176 | 'repository.admin') |
|
177 | 177 | def statistics(self, repo_name): |
|
178 | 178 | if c.db_repo.enable_statistics: |
|
179 | 179 | c.show_stats = True |
|
180 | 180 | c.no_data_msg = _('No data ready yet') |
|
181 | 181 | else: |
|
182 | 182 | c.show_stats = False |
|
183 | 183 | c.no_data_msg = _('Statistics are disabled for this repository') |
|
184 | 184 | |
|
185 | 185 | td = date.today() + timedelta(days=1) |
|
186 | 186 | td_1m = td - timedelta(days=calendar.mdays[td.month]) |
|
187 | 187 | td_1y = td - timedelta(days=365) |
|
188 | 188 | |
|
189 | 189 | ts_min_m = mktime(td_1m.timetuple()) |
|
190 | 190 | ts_min_y = mktime(td_1y.timetuple()) |
|
191 | 191 | ts_max_y = mktime(td.timetuple()) |
|
192 | 192 | c.ts_min = ts_min_m |
|
193 | 193 | c.ts_max = ts_max_y |
|
194 | 194 | |
|
195 | 195 | stats = self.sa.query(Statistics)\ |
|
196 | 196 | .filter(Statistics.repository == c.db_repo)\ |
|
197 | 197 | .scalar() |
|
198 | 198 | if stats and stats.languages: |
|
199 | 199 | c.no_data = False is c.db_repo.enable_statistics |
|
200 | 200 | lang_stats_d = json.loads(stats.languages) |
|
201 | 201 | c.commit_data = stats.commit_activity |
|
202 | 202 | c.overview_data = stats.commit_activity_combined |
|
203 | 203 | |
|
204 | 204 | lang_stats = ((x, {"count": y, |
|
205 | 205 | "desc": LANGUAGES_EXTENSIONS_MAP.get(x)}) |
|
206 | 206 | for x, y in lang_stats_d.items()) |
|
207 | 207 | |
|
208 | 208 | c.trending_languages = json.dumps( |
|
209 | 209 | sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10] |
|
210 | 210 | ) |
|
211 | 211 | last_rev = stats.stat_on_revision + 1 |
|
212 | 212 | c.repo_last_rev = c.db_repo_scm_instance.count()\ |
|
213 | 213 | if c.db_repo_scm_instance.revisions else 0 |
|
214 | 214 | if last_rev == 0 or c.repo_last_rev == 0: |
|
215 | 215 | pass |
|
216 | 216 | else: |
|
217 | 217 | c.stats_percentage = '%.2f' % ((float((last_rev)) / |
|
218 | 218 | c.repo_last_rev) * 100) |
|
219 | 219 | else: |
|
220 | 220 | c.commit_data = json.dumps({}) |
|
221 | 221 | c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]]) |
|
222 | 222 | c.trending_languages = json.dumps({}) |
|
223 | 223 | c.no_data = True |
|
224 | 224 | |
|
225 | 225 | recurse_limit = 500 # don't recurse more than 500 times when parsing |
|
226 | 226 | run_task(get_commits_stats, c.db_repo.repo_name, ts_min_y, |
|
227 | 227 | ts_max_y, recurse_limit) |
|
228 | 228 | return render('summary/statistics.html') |
@@ -1,1298 +1,1298 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.auth |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | authentication and permission libraries |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 4, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | from __future__ import with_statement |
|
28 | 28 | import time |
|
29 | 29 | import os |
|
30 | 30 | import logging |
|
31 | 31 | import traceback |
|
32 | 32 | import hashlib |
|
33 | 33 | import itertools |
|
34 | 34 | import collections |
|
35 | 35 | |
|
36 | 36 | from decorator import decorator |
|
37 | 37 | |
|
38 | 38 | from pylons import url, request |
|
39 | 39 | from pylons.controllers.util import abort, redirect |
|
40 | 40 | from pylons.i18n.translation import _ |
|
41 | 41 | from webhelpers.pylonslib import secure_form |
|
42 | 42 | from sqlalchemy import or_ |
|
43 | 43 | from sqlalchemy.orm.exc import ObjectDeletedError |
|
44 | 44 | from sqlalchemy.orm import joinedload |
|
45 | 45 | |
|
46 | 46 | from kallithea import __platform__, is_windows, is_unix |
|
47 | 47 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
48 | 48 | from kallithea.model import meta |
|
49 | 49 | from kallithea.model.meta import Session |
|
50 | 50 | from kallithea.model.user import UserModel |
|
51 | 51 | from kallithea.model.db import User, Repository, Permission, \ |
|
52 | 52 | UserToPerm, UserGroupRepoToPerm, UserGroupToPerm, UserGroupMember, \ |
|
53 | 53 | RepoGroup, UserGroupRepoGroupToPerm, UserIpMap, UserGroupUserGroupToPerm, \ |
|
54 | 54 | UserGroup, UserApiKeys |
|
55 | 55 | |
|
56 | 56 | from kallithea.lib.utils2 import safe_unicode, aslist |
|
57 | 57 | from kallithea.lib.utils import get_repo_slug, get_repo_group_slug, \ |
|
58 | 58 | get_user_group_slug, conditional_cache |
|
59 | 59 | from kallithea.lib.caching_query import FromCache |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | log = logging.getLogger(__name__) |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | class PasswordGenerator(object): |
|
66 | 66 | """ |
|
67 | 67 | This is a simple class for generating password from different sets of |
|
68 | 68 | characters |
|
69 | 69 | usage:: |
|
70 | 70 | |
|
71 | 71 | passwd_gen = PasswordGenerator() |
|
72 | 72 | #print 8-letter password containing only big and small letters |
|
73 | 73 | of alphabet |
|
74 | 74 | passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL) |
|
75 | 75 | """ |
|
76 | 76 | ALPHABETS_NUM = r'''1234567890''' |
|
77 | 77 | ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm''' |
|
78 | 78 | ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM''' |
|
79 | 79 | ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?''' |
|
80 | 80 | ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \ |
|
81 | 81 | + ALPHABETS_NUM + ALPHABETS_SPECIAL |
|
82 | 82 | ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM |
|
83 | 83 | ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL |
|
84 | 84 | ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM |
|
85 | 85 | ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM |
|
86 | 86 | |
|
87 | 87 | def gen_password(self, length, alphabet=ALPHABETS_FULL): |
|
88 | 88 | assert len(alphabet) <= 256, alphabet |
|
89 | 89 | l = [] |
|
90 | 90 | while len(l) < length: |
|
91 | 91 | i = ord(os.urandom(1)) |
|
92 | 92 | if i < len(alphabet): |
|
93 | 93 | l.append(alphabet[i]) |
|
94 | 94 | return ''.join(l) |
|
95 | 95 | |
|
96 | 96 | |
|
97 | 97 | class KallitheaCrypto(object): |
|
98 | 98 | |
|
99 | 99 | @classmethod |
|
100 | 100 | def hash_string(cls, str_): |
|
101 | 101 | """ |
|
102 | 102 | Cryptographic function used for password hashing based on pybcrypt |
|
103 | 103 | or pycrypto in windows |
|
104 | 104 | |
|
105 | 105 | :param password: password to hash |
|
106 | 106 | """ |
|
107 | 107 | if is_windows: |
|
108 | 108 | return hashlib.sha256(str_).hexdigest() |
|
109 | 109 | elif is_unix: |
|
110 | 110 | import bcrypt |
|
111 | 111 | return bcrypt.hashpw(str_, bcrypt.gensalt(10)) |
|
112 | 112 | else: |
|
113 | 113 | raise Exception('Unknown or unsupported platform %s' \ |
|
114 | 114 | % __platform__) |
|
115 | 115 | |
|
116 | 116 | @classmethod |
|
117 | 117 | def hash_check(cls, password, hashed): |
|
118 | 118 | """ |
|
119 | 119 | Checks matching password with it's hashed value, runs different |
|
120 | 120 | implementation based on platform it runs on |
|
121 | 121 | |
|
122 | 122 | :param password: password |
|
123 | 123 | :param hashed: password in hashed form |
|
124 | 124 | """ |
|
125 | 125 | |
|
126 | 126 | if is_windows: |
|
127 | 127 | return hashlib.sha256(password).hexdigest() == hashed |
|
128 | 128 | elif is_unix: |
|
129 | 129 | import bcrypt |
|
130 | 130 | return bcrypt.hashpw(password, hashed) == hashed |
|
131 | 131 | else: |
|
132 | 132 | raise Exception('Unknown or unsupported platform %s' \ |
|
133 | 133 | % __platform__) |
|
134 | 134 | |
|
135 | 135 | |
|
136 | 136 | def get_crypt_password(password): |
|
137 | 137 | return KallitheaCrypto.hash_string(password) |
|
138 | 138 | |
|
139 | 139 | |
|
140 | 140 | def check_password(password, hashed): |
|
141 | 141 | return KallitheaCrypto.hash_check(password, hashed) |
|
142 | 142 | |
|
143 | 143 | |
|
144 | 144 | |
|
145 | 145 | def _cached_perms_data(user_id, user_is_admin, user_inherit_default_permissions, |
|
146 | 146 | explicit, algo): |
|
147 | 147 | RK = 'repositories' |
|
148 | 148 | GK = 'repositories_groups' |
|
149 | 149 | UK = 'user_groups' |
|
150 | 150 | GLOBAL = 'global' |
|
151 | 151 | PERM_WEIGHTS = Permission.PERM_WEIGHTS |
|
152 | 152 | permissions = {RK: {}, GK: {}, UK: {}, GLOBAL: set()} |
|
153 | 153 | |
|
154 | 154 | def _choose_perm(new_perm, cur_perm): |
|
155 | 155 | new_perm_val = PERM_WEIGHTS[new_perm] |
|
156 | 156 | cur_perm_val = PERM_WEIGHTS[cur_perm] |
|
157 | 157 | if algo == 'higherwin': |
|
158 | 158 | if new_perm_val > cur_perm_val: |
|
159 | 159 | return new_perm |
|
160 | 160 | return cur_perm |
|
161 | 161 | elif algo == 'lowerwin': |
|
162 | 162 | if new_perm_val < cur_perm_val: |
|
163 | 163 | return new_perm |
|
164 | 164 | return cur_perm |
|
165 | 165 | |
|
166 | 166 | #====================================================================== |
|
167 | 167 | # fetch default permissions |
|
168 | 168 | #====================================================================== |
|
169 | 169 | default_user = User.get_by_username('default', cache=True) |
|
170 | 170 | default_user_id = default_user.user_id |
|
171 | 171 | |
|
172 | 172 | default_repo_perms = Permission.get_default_perms(default_user_id) |
|
173 | 173 | default_repo_groups_perms = Permission.get_default_group_perms(default_user_id) |
|
174 | 174 | default_user_group_perms = Permission.get_default_user_group_perms(default_user_id) |
|
175 | 175 | |
|
176 | 176 | if user_is_admin: |
|
177 | 177 | #================================================================== |
|
178 | 178 | # admin users have all rights; |
|
179 | 179 | # based on default permissions, just set everything to admin |
|
180 | 180 | #================================================================== |
|
181 | 181 | permissions[GLOBAL].add('hg.admin') |
|
182 | 182 | permissions[GLOBAL].add('hg.create.write_on_repogroup.true') |
|
183 | 183 | |
|
184 | 184 | # repositories |
|
185 | 185 | for perm in default_repo_perms: |
|
186 | 186 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
187 | 187 | p = 'repository.admin' |
|
188 | 188 | permissions[RK][r_k] = p |
|
189 | 189 | |
|
190 | 190 | # repository groups |
|
191 | 191 | for perm in default_repo_groups_perms: |
|
192 | 192 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
193 | 193 | p = 'group.admin' |
|
194 | 194 | permissions[GK][rg_k] = p |
|
195 | 195 | |
|
196 | 196 | # user groups |
|
197 | 197 | for perm in default_user_group_perms: |
|
198 | 198 | u_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
199 | 199 | p = 'usergroup.admin' |
|
200 | 200 | permissions[UK][u_k] = p |
|
201 | 201 | return permissions |
|
202 | 202 | |
|
203 | 203 | #================================================================== |
|
204 | 204 | # SET DEFAULTS GLOBAL, REPOS, REPOSITORY GROUPS |
|
205 | 205 | #================================================================== |
|
206 | 206 | |
|
207 | 207 | # default global permissions taken from the default user |
|
208 | 208 | default_global_perms = UserToPerm.query()\ |
|
209 | 209 | .filter(UserToPerm.user_id == default_user_id)\ |
|
210 | 210 | .options(joinedload(UserToPerm.permission)) |
|
211 | 211 | |
|
212 | 212 | for perm in default_global_perms: |
|
213 | 213 | permissions[GLOBAL].add(perm.permission.permission_name) |
|
214 | 214 | |
|
215 | 215 | # defaults for repositories, taken from default user |
|
216 | 216 | for perm in default_repo_perms: |
|
217 | 217 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
218 | 218 | if perm.Repository.private and not (perm.Repository.user_id == user_id): |
|
219 | 219 | # disable defaults for private repos, |
|
220 | 220 | p = 'repository.none' |
|
221 | 221 | elif perm.Repository.user_id == user_id: |
|
222 | 222 | # set admin if owner |
|
223 | 223 | p = 'repository.admin' |
|
224 | 224 | else: |
|
225 | 225 | p = perm.Permission.permission_name |
|
226 | 226 | |
|
227 | 227 | permissions[RK][r_k] = p |
|
228 | 228 | |
|
229 | 229 | # defaults for repository groups taken from default user permission |
|
230 | 230 | # on given group |
|
231 | 231 | for perm in default_repo_groups_perms: |
|
232 | 232 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
233 | 233 | p = perm.Permission.permission_name |
|
234 | 234 | permissions[GK][rg_k] = p |
|
235 | 235 | |
|
236 | 236 | # defaults for user groups taken from default user permission |
|
237 | 237 | # on given user group |
|
238 | 238 | for perm in default_user_group_perms: |
|
239 | 239 | u_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
240 | 240 | p = perm.Permission.permission_name |
|
241 | 241 | permissions[UK][u_k] = p |
|
242 | 242 | |
|
243 | 243 | #====================================================================== |
|
244 | 244 | # !! OVERRIDE GLOBALS !! with user permissions if any found |
|
245 | 245 | #====================================================================== |
|
246 | 246 | # those can be configured from groups or users explicitly |
|
247 | 247 | _configurable = set([ |
|
248 | 248 | 'hg.fork.none', 'hg.fork.repository', |
|
249 | 249 | 'hg.create.none', 'hg.create.repository', |
|
250 | 250 | 'hg.usergroup.create.false', 'hg.usergroup.create.true' |
|
251 | 251 | ]) |
|
252 | 252 | |
|
253 | 253 | # USER GROUPS comes first |
|
254 | 254 | # user group global permissions |
|
255 | 255 | user_perms_from_users_groups = Session().query(UserGroupToPerm)\ |
|
256 | 256 | .options(joinedload(UserGroupToPerm.permission))\ |
|
257 | 257 | .join((UserGroupMember, UserGroupToPerm.users_group_id == |
|
258 | 258 | UserGroupMember.users_group_id))\ |
|
259 | 259 | .filter(UserGroupMember.user_id == user_id)\ |
|
260 | 260 | .join((UserGroup, UserGroupMember.users_group_id == |
|
261 | 261 | UserGroup.users_group_id))\ |
|
262 | 262 | .filter(UserGroup.users_group_active == True)\ |
|
263 | 263 | .order_by(UserGroupToPerm.users_group_id)\ |
|
264 | 264 | .all() |
|
265 | 265 | # need to group here by groups since user can be in more than |
|
266 | 266 | # one group |
|
267 | 267 | _grouped = [[x, list(y)] for x, y in |
|
268 | 268 | itertools.groupby(user_perms_from_users_groups, |
|
269 | 269 | lambda x:x.users_group)] |
|
270 | 270 | for gr, perms in _grouped: |
|
271 | 271 | # since user can be in multiple groups iterate over them and |
|
272 | 272 | # select the lowest permissions first (more explicit) |
|
273 | 273 | ##TODO: do this^^ |
|
274 | 274 | if not gr.inherit_default_permissions: |
|
275 | 275 | # NEED TO IGNORE all configurable permissions and |
|
276 | 276 | # replace them with explicitly set |
|
277 | 277 | permissions[GLOBAL] = permissions[GLOBAL]\ |
|
278 | 278 | .difference(_configurable) |
|
279 | 279 | for perm in perms: |
|
280 | 280 | permissions[GLOBAL].add(perm.permission.permission_name) |
|
281 | 281 | |
|
282 | 282 | # user specific global permissions |
|
283 | 283 | user_perms = Session().query(UserToPerm)\ |
|
284 | 284 | .options(joinedload(UserToPerm.permission))\ |
|
285 | 285 | .filter(UserToPerm.user_id == user_id).all() |
|
286 | 286 | |
|
287 | 287 | if not user_inherit_default_permissions: |
|
288 | 288 | # NEED TO IGNORE all configurable permissions and |
|
289 | 289 | # replace them with explicitly set |
|
290 | 290 | permissions[GLOBAL] = permissions[GLOBAL]\ |
|
291 | 291 | .difference(_configurable) |
|
292 | 292 | |
|
293 | 293 | for perm in user_perms: |
|
294 | 294 | permissions[GLOBAL].add(perm.permission.permission_name) |
|
295 | 295 | ## END GLOBAL PERMISSIONS |
|
296 | 296 | |
|
297 | 297 | #====================================================================== |
|
298 | 298 | # !! PERMISSIONS FOR REPOSITORIES !! |
|
299 | 299 | #====================================================================== |
|
300 | 300 | #====================================================================== |
|
301 | 301 | # check if user is part of user groups for this repository and |
|
302 | 302 | # fill in his permission from it. _choose_perm decides of which |
|
303 | 303 | # permission should be selected based on selected method |
|
304 | 304 | #====================================================================== |
|
305 | 305 | |
|
306 | 306 | # user group for repositories permissions |
|
307 | 307 | user_repo_perms_from_users_groups = \ |
|
308 | 308 | Session().query(UserGroupRepoToPerm, Permission, Repository,)\ |
|
309 | 309 | .join((Repository, UserGroupRepoToPerm.repository_id == |
|
310 | 310 | Repository.repo_id))\ |
|
311 | 311 | .join((Permission, UserGroupRepoToPerm.permission_id == |
|
312 | 312 | Permission.permission_id))\ |
|
313 | 313 | .join((UserGroup, UserGroupRepoToPerm.users_group_id == |
|
314 | 314 | UserGroup.users_group_id))\ |
|
315 | 315 | .filter(UserGroup.users_group_active == True)\ |
|
316 | 316 | .join((UserGroupMember, UserGroupRepoToPerm.users_group_id == |
|
317 | 317 | UserGroupMember.users_group_id))\ |
|
318 | 318 | .filter(UserGroupMember.user_id == user_id)\ |
|
319 | 319 | .all() |
|
320 | 320 | |
|
321 | 321 | multiple_counter = collections.defaultdict(int) |
|
322 | 322 | for perm in user_repo_perms_from_users_groups: |
|
323 | 323 | r_k = perm.UserGroupRepoToPerm.repository.repo_name |
|
324 | 324 | multiple_counter[r_k] += 1 |
|
325 | 325 | p = perm.Permission.permission_name |
|
326 | 326 | cur_perm = permissions[RK][r_k] |
|
327 | 327 | |
|
328 | 328 | if perm.Repository.user_id == user_id: |
|
329 | 329 | # set admin if owner |
|
330 | 330 | p = 'repository.admin' |
|
331 | 331 | else: |
|
332 | 332 | if multiple_counter[r_k] > 1: |
|
333 | 333 | p = _choose_perm(p, cur_perm) |
|
334 | 334 | permissions[RK][r_k] = p |
|
335 | 335 | |
|
336 | 336 | # user explicit permissions for repositories, overrides any specified |
|
337 | 337 | # by the group permission |
|
338 | 338 | user_repo_perms = Permission.get_default_perms(user_id) |
|
339 | 339 | for perm in user_repo_perms: |
|
340 | 340 | r_k = perm.UserRepoToPerm.repository.repo_name |
|
341 | 341 | cur_perm = permissions[RK][r_k] |
|
342 | 342 | # set admin if owner |
|
343 | 343 | if perm.Repository.user_id == user_id: |
|
344 | 344 | p = 'repository.admin' |
|
345 | 345 | else: |
|
346 | 346 | p = perm.Permission.permission_name |
|
347 | 347 | if not explicit: |
|
348 | 348 | p = _choose_perm(p, cur_perm) |
|
349 | 349 | permissions[RK][r_k] = p |
|
350 | 350 | |
|
351 | 351 | #====================================================================== |
|
352 | 352 | # !! PERMISSIONS FOR REPOSITORY GROUPS !! |
|
353 | 353 | #====================================================================== |
|
354 | 354 | #====================================================================== |
|
355 | 355 | # check if user is part of user groups for this repository groups and |
|
356 | 356 | # fill in his permission from it. _choose_perm decides of which |
|
357 | 357 | # permission should be selected based on selected method |
|
358 | 358 | #====================================================================== |
|
359 | 359 | # user group for repo groups permissions |
|
360 | 360 | user_repo_group_perms_from_users_groups = \ |
|
361 | 361 | Session().query(UserGroupRepoGroupToPerm, Permission, RepoGroup)\ |
|
362 | 362 | .join((RepoGroup, UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id))\ |
|
363 | 363 | .join((Permission, UserGroupRepoGroupToPerm.permission_id |
|
364 | 364 | == Permission.permission_id))\ |
|
365 | 365 | .join((UserGroup, UserGroupRepoGroupToPerm.users_group_id == |
|
366 | 366 | UserGroup.users_group_id))\ |
|
367 | 367 | .filter(UserGroup.users_group_active == True)\ |
|
368 | 368 | .join((UserGroupMember, UserGroupRepoGroupToPerm.users_group_id |
|
369 | 369 | == UserGroupMember.users_group_id))\ |
|
370 | 370 | .filter(UserGroupMember.user_id == user_id)\ |
|
371 | 371 | .all() |
|
372 | 372 | |
|
373 | 373 | multiple_counter = collections.defaultdict(int) |
|
374 | 374 | for perm in user_repo_group_perms_from_users_groups: |
|
375 | 375 | g_k = perm.UserGroupRepoGroupToPerm.group.group_name |
|
376 | 376 | multiple_counter[g_k] += 1 |
|
377 | 377 | p = perm.Permission.permission_name |
|
378 | 378 | cur_perm = permissions[GK][g_k] |
|
379 | 379 | if multiple_counter[g_k] > 1: |
|
380 | 380 | p = _choose_perm(p, cur_perm) |
|
381 | 381 | permissions[GK][g_k] = p |
|
382 | 382 | |
|
383 | 383 | # user explicit permissions for repository groups |
|
384 | 384 | user_repo_groups_perms = Permission.get_default_group_perms(user_id) |
|
385 | 385 | for perm in user_repo_groups_perms: |
|
386 | 386 | rg_k = perm.UserRepoGroupToPerm.group.group_name |
|
387 | 387 | p = perm.Permission.permission_name |
|
388 | 388 | cur_perm = permissions[GK][rg_k] |
|
389 | 389 | if not explicit: |
|
390 | 390 | p = _choose_perm(p, cur_perm) |
|
391 | 391 | permissions[GK][rg_k] = p |
|
392 | 392 | |
|
393 | 393 | #====================================================================== |
|
394 | 394 | # !! PERMISSIONS FOR USER GROUPS !! |
|
395 | 395 | #====================================================================== |
|
396 | 396 | # user group for user group permissions |
|
397 | 397 | user_group_user_groups_perms = \ |
|
398 | 398 | Session().query(UserGroupUserGroupToPerm, Permission, UserGroup)\ |
|
399 | 399 | .join((UserGroup, UserGroupUserGroupToPerm.target_user_group_id |
|
400 | 400 | == UserGroup.users_group_id))\ |
|
401 | 401 | .join((Permission, UserGroupUserGroupToPerm.permission_id |
|
402 | 402 | == Permission.permission_id))\ |
|
403 | 403 | .join((UserGroupMember, UserGroupUserGroupToPerm.user_group_id |
|
404 | 404 | == UserGroupMember.users_group_id))\ |
|
405 | 405 | .filter(UserGroupMember.user_id == user_id)\ |
|
406 | 406 | .join((UserGroup, UserGroupMember.users_group_id == |
|
407 | 407 | UserGroup.users_group_id), aliased=True, from_joinpoint=True)\ |
|
408 | 408 | .filter(UserGroup.users_group_active == True)\ |
|
409 | 409 | .all() |
|
410 | 410 | |
|
411 | 411 | multiple_counter = collections.defaultdict(int) |
|
412 | 412 | for perm in user_group_user_groups_perms: |
|
413 | 413 | g_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name |
|
414 | 414 | multiple_counter[g_k] += 1 |
|
415 | 415 | p = perm.Permission.permission_name |
|
416 | 416 | cur_perm = permissions[UK][g_k] |
|
417 | 417 | if multiple_counter[g_k] > 1: |
|
418 | 418 | p = _choose_perm(p, cur_perm) |
|
419 | 419 | permissions[UK][g_k] = p |
|
420 | 420 | |
|
421 | 421 | #user explicit permission for user groups |
|
422 | 422 | user_user_groups_perms = Permission.get_default_user_group_perms(user_id) |
|
423 | 423 | for perm in user_user_groups_perms: |
|
424 | 424 | u_k = perm.UserUserGroupToPerm.user_group.users_group_name |
|
425 | 425 | p = perm.Permission.permission_name |
|
426 | 426 | cur_perm = permissions[UK][u_k] |
|
427 | 427 | if not explicit: |
|
428 | 428 | p = _choose_perm(p, cur_perm) |
|
429 | 429 | permissions[UK][u_k] = p |
|
430 | 430 | |
|
431 | 431 | return permissions |
|
432 | 432 | |
|
433 | 433 | |
|
def allowed_api_access(controller_name, whitelist=None, api_key=None):
    """
    Check if given controller_name is in whitelist of controllers open
    to API access.

    :param controller_name: "Controller:action" location being accessed
    :param whitelist: iterable of allowed locations; when falsy, it is read
        from the ``api_access_controllers_whitelist`` config setting
    :param api_key: the API key used (if any) - only changes log severity
        when access is denied
    :returns: True when API access to `controller_name` is allowed
    """
    if not whitelist:
        from kallithea import CONFIG
        whitelist = aslist(CONFIG.get('api_access_controllers_whitelist'),
                           sep=',')
        log.debug('whitelist of API access is: %s', whitelist)
    api_access_valid = controller_name in whitelist
    if api_access_valid:
        log.debug('controller:%s is in API whitelist', controller_name)
    elif api_key:
        # A real API key was presented but rejected - that is noteworthy.
        # Lazy %-args for consistency with the rest of this module's logging.
        log.warning('controller: %s is *NOT* in API whitelist',
                    controller_name)
    else:
        log.debug('controller: %s is *NOT* in API whitelist',
                  controller_name)
    return api_access_valid
|
454 | 454 | |
|
455 | 455 | |
|
class AuthUser(object):
    """
    Represents a Kallithea user, including various authentication and
    authorization information. Typically used to store the current user,
    but is also used as a generic user information data structure in
    parts of the code, e.g. user management.

    Constructed from a database `User` object, a user ID or cookie dict,
    it looks up the user (if needed) and copies all attributes to itself,
    adding various non-persistent data. If lookup fails but anonymous
    access to Kallithea is enabled, the default user is loaded instead.

    `AuthUser` does not by itself authenticate users and the constructor
    sets the `is_authenticated` field to False, except when falling back
    to the default anonymous user (if enabled). It's up to other parts
    of the code to check e.g. if a supplied password is correct, and if
    so, set `is_authenticated` to True.

    However, `AuthUser` does refuse to load a user that is not `active`.
    """

    def __init__(self, user_id=None, dbuser=None,
                 is_external_auth=False):

        self.is_authenticated = False
        self.is_external_auth = is_external_auth

        user_model = UserModel()
        self.anonymous_user = User.get_default_user(cache=True)

        # These attributes will be overriden by fill_data, below, unless the
        # requested user cannot be found and the default anonymous user is
        # not enabled.
        self.user_id = None
        self.username = None
        self.api_key = None
        self.name = ''
        self.lastname = ''
        self.email = ''
        self.admin = False
        self.inherit_default_permissions = False

        # Look up database user, if necessary.
        if user_id is not None:
            log.debug('Auth User lookup by USER ID %s', user_id)
            dbuser = user_model.get(user_id)
        else:
            # Note: dbuser is allowed to be None.
            log.debug('Auth User lookup by database user %s', dbuser)

        is_user_loaded = self._fill_data(dbuser)

        # If user cannot be found, try falling back to anonymous.
        if not is_user_loaded:
            is_user_loaded = self._fill_data(self.anonymous_user)

        # The anonymous user is always "logged in".
        if self.user_id == self.anonymous_user.user_id:
            self.is_authenticated = True

        if not self.username:
            self.username = 'None'

        log.debug('Auth User is now %s', self)

    def _fill_data(self, dbuser):
        """
        Copies database fields from a `db.User` to this `AuthUser`. Does
        not copy `api_keys` and `permissions` attributes.

        Checks that `dbuser` is `active` (and not None) before copying;
        returns True on success.
        """
        if dbuser is not None and dbuser.active:
            log.debug('filling %s data', dbuser)
            for k, v in dbuser.get_dict().iteritems():
                assert k not in ['api_keys', 'permissions']
                setattr(self, k, v)
            return True
        return False

    @LazyProperty
    def permissions(self):
        # Computed once per AuthUser instance on first access.
        return self.__get_perms(user=self, cache=False)

    @property
    def api_keys(self):
        return self._get_api_keys()

    def __get_perms(self, user, explicit=True, algo='higherwin', cache=False):
        """
        Fills user permission attribute with permissions taken from database
        works for permissions given for repositories, and for permissions that
        are granted to groups

        :param user: `AuthUser` instance
        :param explicit: In case there are permissions both for user and a group
            that user is part of, explicit flag will define if user will
            explicitly override permissions from group, if it's False it will
            make decision based on the algo
        :param algo: algorithm to decide what permission should be choose if
            it's multiple defined, eg user in two different groups. It also
            decides if explicit flag is turned off how to specify the permission
            for case when user is in a group + have defined separate permission
        """
        user_id = user.user_id
        user_is_admin = user.is_admin
        user_inherit_default_permissions = user.inherit_default_permissions

        log.debug('Getting PERMISSION tree')
        compute = conditional_cache('short_term', 'cache_desc',
                                    condition=cache, func=_cached_perms_data)
        return compute(user_id, user_is_admin,
                       user_inherit_default_permissions, explicit, algo)

    def _get_api_keys(self):
        # The user's primary API key plus any extra keys that have not
        # expired (expires == -1 means "never expires").
        api_keys = [self.api_key]
        for api_key in UserApiKeys.query()\
                .filter(UserApiKeys.user_id == self.user_id)\
                .filter(or_(UserApiKeys.expires == -1,
                            UserApiKeys.expires >= time.time())).all():
            api_keys.append(api_key.api_key)

        return api_keys

    @property
    def is_admin(self):
        return self.admin

    @property
    def repositories_admin(self):
        """
        Returns list of repositories you're an admin of
        """
        return [x[0] for x in self.permissions['repositories'].iteritems()
                if x[1] == 'repository.admin']

    @property
    def repository_groups_admin(self):
        """
        Returns list of repository groups you're an admin of
        """
        return [x[0] for x in self.permissions['repositories_groups'].iteritems()
                if x[1] == 'group.admin']

    @property
    def user_groups_admin(self):
        """
        Returns list of user groups you're an admin of
        """
        return [x[0] for x in self.permissions['user_groups'].iteritems()
                if x[1] == 'usergroup.admin']

    @staticmethod
    def check_ip_allowed(user, ip_addr):
        """
        Check if the given IP address (a `str`) is allowed for the given
        user (an `AuthUser` or `db.User`).
        """
        allowed_ips = AuthUser.get_allowed_ips(user.user_id, cache=True,
            inherit_from_default=user.inherit_default_permissions)
        if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
            log.debug('IP:%s is in range of %s', ip_addr, allowed_ips)
            return True
        else:
            # Fixed: lazy %-args instead of eager string formatting, for
            # consistency with every other log call in this module.
            log.info('Access for IP:%s forbidden, not in %s',
                     ip_addr, allowed_ips)
            return False

    def __repr__(self):
        return "<AuthUser('id:%s[%s] auth:%s')>"\
            % (self.user_id, self.username, self.is_authenticated)

    def set_authenticated(self, authenticated=True):
        # The anonymous user's authentication state can never be changed.
        if self.user_id != self.anonymous_user.user_id:
            self.is_authenticated = authenticated

    def to_cookie(self):
        """ Serializes this login session to a cookie `dict`. """
        return {
            'user_id': self.user_id,
            'is_authenticated': self.is_authenticated,
            'is_external_auth': self.is_external_auth,
        }

    @staticmethod
    def from_cookie(cookie):
        """
        Deserializes an `AuthUser` from a cookie `dict`.
        """

        au = AuthUser(
            user_id=cookie.get('user_id'),
            is_external_auth=cookie.get('is_external_auth', False),
        )
        if not au.is_authenticated and au.user_id is not None:
            # user is not authenticated and not empty
            au.set_authenticated(cookie.get('is_authenticated'))
        return au

    @classmethod
    def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
        """
        Return the set of IP network strings allowed for `user_id`,
        optionally merged with the default user's networks. Falls back
        to "allow everything" when no restriction is configured.
        """
        _set = set()

        if inherit_from_default:
            default_ips = UserIpMap.query().filter(UserIpMap.user ==
                                            User.get_default_user(cache=True))
            if cache:
                default_ips = default_ips.options(FromCache("sql_cache_short",
                                                  "get_user_ips_default"))

            # populate from default user
            for ip in default_ips:
                try:
                    _set.add(ip.ip_addr)
                except ObjectDeletedError:
                    # since we use heavy caching sometimes it happens that we get
                    # deleted objects here, we just skip them
                    pass

        user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
        if cache:
            user_ips = user_ips.options(FromCache("sql_cache_short",
                                                  "get_user_ips_%s" % user_id))

        for ip in user_ips:
            try:
                _set.add(ip.ip_addr)
            except ObjectDeletedError:
                # since we use heavy caching sometimes it happens that we get
                # deleted objects here, we just skip them
                pass
        return _set or set(['0.0.0.0/0', '::/0'])
|
689 | 689 | |
|
690 | 690 | |
|
def set_available_permissions(config):
    """Load every permission name defined in the database into `config`.

    Permissions are read once at startup rather than per request: adding a
    new permission requires an application restart anyway, since new views
    must be decorated with it before it can have any effect.

    :param config: current pylons config instance
    """
    log.info('getting information about all available permissions')
    try:
        config['available_permissions'] = [
            perm.permission_name
            for perm in meta.Session.query(Permission).all()
        ]
    finally:
        # Always release the thread-local session, even if the query fails.
        meta.Session.remove()
|
708 | 708 | |
|
709 | 709 | |
|
710 | 710 | #============================================================================== |
|
711 | 711 | # CHECK DECORATORS |
|
712 | 712 | #============================================================================== |
|
713 | 713 | |
|
def redirect_to_login(message=None):
    """Redirect the current request to the login page.

    An optional `message` is flashed as a warning first; the current URL
    is passed along as `came_from` so the user is sent back here after a
    successful login.
    """
    from kallithea.lib import helpers as h
    came_from = url.current()
    if message:
        h.flash(h.literal(message), category='warning')
    log.debug('Redirecting to login page, origin: %s', came_from)
    return redirect(url('login_home', came_from=came_from, **request.GET))
|
721 | 721 | |
|
class LoginRequired(object):
    """
    Must be logged in to execute this function else
    redirect to login page

    :param api_access: if enabled this checks only for valid auth token
        and grants access based on valid token
    """

    def __init__(self, api_access=False):
        self.api_access = api_access

    def __call__(self, func):
        return decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        # fargs[0] is the controller instance the decorated action runs on.
        controller = fargs[0]
        user = controller.authuser
        loc = "%s:%s" % (controller.__class__.__name__, func.__name__)
        log.debug('Checking access for user %s @ %s', user, loc)

        # IP restrictions are checked first - they apply to both API-key
        # and interactive access.
        if not AuthUser.check_ip_allowed(user, controller.ip_addr):
            return redirect_to_login(_('IP %s not allowed') % controller.ip_addr)

        # check if we used an API key and it's a valid one
        api_key = request.GET.get('api_key')
        if api_key is not None:
            # explicit controller is enabled or API is in our whitelist
            if self.api_access or allowed_api_access(loc, api_key=api_key):
                if api_key in user.api_keys:
                    # Only the last 4 characters of the key are logged.
                    log.info('user %s authenticated with API key ****%s @ %s',
                             user, api_key[-4:], loc)
                    return func(*fargs, **fkwargs)
                else:
                    log.warning('API key ****%s is NOT valid', api_key[-4:])
                    return redirect_to_login(_('Invalid API key'))
            else:
                # controller does not allow API access
                log.warning('API access to %s is not allowed', loc)
                return abort(403)

        # CSRF protection - POSTs with session auth must contain correct token
        if request.POST and user.is_authenticated:
            token = request.POST.get(secure_form.token_key)
            if not token or token != secure_form.authentication_token():
                log.error('CSRF check failed')
                return abort(403)

        # regular user authentication
        if user.is_authenticated:
            log.info('user %s authenticated with regular auth @ %s', user, loc)
            return func(*fargs, **fkwargs)
        else:
            log.warning('user %s NOT authenticated with regular auth @ %s', user, loc)
            return redirect_to_login()
|
777 | 777 | |
|
class NotAnonymous(object):
    """
    Deny the anonymous (default) user: any registered user may proceed,
    while anonymous visitors are redirected to the login page.
    """

    def __call__(self, func):
        return decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        cls = fargs[0]
        self.user = cls.authuser

        log.debug('Checking if user is not anonymous @%s', cls)

        if self.user.username == User.DEFAULT_USER:
            # Anonymous visitor - bounce to the login page with a hint.
            return redirect_to_login(_('You need to be a registered user to '
                                       'perform this action'))
        return func(*fargs, **fkwargs)
|
799 | 799 | |
|
800 | 800 | |
|
class PermsDecorator(object):
    """Base class for controller decorators.

    Subclasses implement `check_permissions`, comparing `self.required_perms`
    against the current user's `self.user_perms`.
    """

    def __init__(self, *required_perms):
        self.required_perms = set(required_perms)
        self.user_perms = None

    def __call__(self, func):
        return decorator(self.__wrapper, func)

    def __wrapper(self, func, *fargs, **fkwargs):
        # fargs[0] is the controller instance the decorated action runs on.
        cls = fargs[0]
        self.user = cls.authuser
        self.user_perms = self.user.permissions
        # NOTE(review): the argument line of this debug call was truncated in
        # the text under review - reconstructed from the format string; verify
        # against upstream.
        log.debug('checking %s permissions %s for %s %s',
                  self.__class__.__name__, self.required_perms, cls, self.user)

        if self.check_permissions():
            log.debug('Permission granted for %s %s', cls, self.user)
            return func(*fargs, **fkwargs)

        else:
            log.debug('Permission denied for %s %s', cls, self.user)
            anonymous = self.user.username == User.DEFAULT_USER

            if anonymous:
                return redirect_to_login(_('You need to be signed in to view this page'))
            else:
                # redirect with forbidden ret code
                return abort(403)

    def check_permissions(self):
        """Dummy function for overriding"""
        raise Exception('You have to write this function in child class')
|
835 | 835 | |
|
836 | 836 | |
|
class HasPermissionAllDecorator(PermsDecorator):
    """
    Grant access only when the user holds every one of the required
    global permissions.
    """

    def check_permissions(self):
        return self.required_perms.issubset(self.user_perms.get('global'))
|
847 | 847 | |
|
848 | 848 | |
|
class HasPermissionAnyDecorator(PermsDecorator):
    """
    Grant access when the user holds at least one of the required
    global permissions.
    """

    def check_permissions(self):
        return bool(self.required_perms.intersection(self.user_perms.get('global')))
|
859 | 859 | |
|
860 | 860 | |
|
class HasRepoPermissionAllDecorator(PermsDecorator):
    """
    Grant access only when the user holds every required permission on the
    repository named in the current request.
    """

    def check_permissions(self):
        repo_name = get_repo_slug(request)
        try:
            granted = {self.user_perms['repositories'][repo_name]}
        except KeyError:
            # Unknown repository, or no permission entry for it at all.
            return False
        return self.required_perms <= granted
|
876 | 876 | |
|
877 | 877 | |
|
class HasRepoPermissionAnyDecorator(PermsDecorator):
    """
    Grant access when the user holds at least one of the required
    permissions on the repository named in the current request.
    """

    def check_permissions(self):
        repo_name = get_repo_slug(request)
        try:
            granted = {self.user_perms['repositories'][repo_name]}
        except KeyError:
            # Unknown repository, or no permission entry for it at all.
            return False
        return bool(self.required_perms & granted)
|
894 | 894 | |
|
895 | 895 | |
|
class HasRepoGroupPermissionAllDecorator(PermsDecorator):
    """
    Grant access only when the user holds every required permission on the
    repository group named in the current request.
    """

    def check_permissions(self):
        group_name = get_repo_group_slug(request)
        try:
            granted = {self.user_perms['repositories_groups'][group_name]}
        except KeyError:
            # Unknown group, or no permission entry for it at all.
            return False
        return self.required_perms <= granted
|
912 | 912 | |
|
913 | 913 | |
|
class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
    """
    Grant access when the user holds at least one of the required
    permissions on the repository group named in the current request.
    """

    def check_permissions(self):
        group_name = get_repo_group_slug(request)
        try:
            granted = {self.user_perms['repositories_groups'][group_name]}
        except KeyError:
            # Unknown group, or no permission entry for it at all.
            return False
        return bool(self.required_perms & granted)
|
930 | 930 | |
|
931 | 931 | |
|
class HasUserGroupPermissionAllDecorator(PermsDecorator):
    """
    Grant access only when the user holds every required permission on the
    user group named in the current request.
    """

    def check_permissions(self):
        group_name = get_user_group_slug(request)
        try:
            granted = {self.user_perms['user_groups'][group_name]}
        except KeyError:
            # Unknown user group, or no permission entry for it at all.
            return False
        return self.required_perms <= granted
|
948 | 948 | |
|
949 | 949 | |
|
class HasUserGroupPermissionAnyDecorator(PermsDecorator):
    """
    Grant access when the user holds at least one of the required
    permissions on the user group named in the current request.
    """

    def check_permissions(self):
        group_name = get_user_group_slug(request)
        try:
            granted = {self.user_perms['user_groups'][group_name]}
        except KeyError:
            # Unknown user group, or no permission entry for it at all.
            return False
        return bool(self.required_perms & granted)
|
966 | 966 | |
|
967 | 967 | |
|
968 | 968 | #============================================================================== |
|
969 | 969 | # CHECK FUNCTIONS |
|
970 | 970 | #============================================================================== |
|
class PermsFunction(object):
    """Base function for other check functions.

    Instances are callable permission predicates; subclasses implement
    `check_permissions` and may override `__call__` to bind a scope
    (repository, group) before delegating here.
    """

    def __init__(self, *perms):
        self.required_perms = set(perms)
        self.user_perms = None
        self.repo_name = None
        self.group_name = None

    def __call__(self, check_location='', user=None):
        if not user:
            #TODO: remove this someday,put as user as attribute here
            user = request.user

        # init auth user if not already given
        if not isinstance(user, AuthUser):
            user = AuthUser(user.user_id)

        cls_name = self.__class__.__name__
        # Human-readable scope of this check - used only in log messages.
        check_scope = {
            'HasPermissionAll': '',
            'HasPermissionAny': '',
            'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
            'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
            'HasRepoGroupPermissionAll': 'group:%s' % self.group_name,
            'HasRepoGroupPermissionAny': 'group:%s' % self.group_name,
        }.get(cls_name, '?')
        log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
                  self.required_perms, user, check_scope,
                  check_location or 'unspecified location')
        if not user:
            log.debug('Empty request user')
            return False
        self.user_perms = user.permissions
        if self.check_permissions():
            log.debug('Permission to %s granted for user: %s @ %s',
                      check_scope, user,
                      check_location or 'unspecified location')
            return True

        else:
            log.debug('Permission to %s denied for user: %s @ %s',
                      check_scope, user,
                      check_location or 'unspecified location')
            return False

    def check_permissions(self):
        """Dummy function for overriding"""
        raise Exception('You have to write this function in child class')
|
1020 | 1020 | |
|
1021 | 1021 | |
|
class HasPermissionAll(PermsFunction):
    """True when the user holds every required global permission."""

    def check_permissions(self):
        return self.required_perms.issubset(self.user_perms.get('global'))
|
1027 | 1027 | |
|
1028 | 1028 | |
|
class HasPermissionAny(PermsFunction):
    """True when the user holds at least one required global permission."""

    def check_permissions(self):
        return bool(self.required_perms.intersection(self.user_perms.get('global')))
|
1034 | 1034 | |
|
1035 | 1035 | |
|
class HasRepoPermissionAll(PermsFunction):
    """True when the user holds every required permission on a repository."""

    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAll, self).__call__(check_location, user)

    def check_permissions(self):
        # Fall back to the repository named in the current request.
        if not self.repo_name:
            self.repo_name = get_repo_slug(request)

        try:
            self._user_perms = {self.user_perms['repositories'][self.repo_name]}
        except KeyError:
            return False
        return self.required_perms <= self._user_perms
|
1054 | 1054 | |
|
1055 | 1055 | |
|
class HasRepoPermissionAny(PermsFunction):
    """True when the user holds any required permission on a repository."""

    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAny, self).__call__(check_location, user)

    def check_permissions(self):
        # Fall back to the repository named in the current request.
        if not self.repo_name:
            self.repo_name = get_repo_slug(request)

        try:
            self._user_perms = {self.user_perms['repositories'][self.repo_name]}
        except KeyError:
            return False
        return bool(self.required_perms & self._user_perms)
|
1074 | 1074 | |
|
1075 | 1075 | |
|
class HasRepoGroupPermissionAny(PermsFunction):
    """True when the user holds any required permission on a repo group."""

    def __call__(self, group_name=None, check_location='', user=None):
        self.group_name = group_name
        return super(HasRepoGroupPermissionAny, self).__call__(check_location, user)

    def check_permissions(self):
        try:
            self._user_perms = {self.user_perms['repositories_groups'][self.group_name]}
        except KeyError:
            return False
        return bool(self.required_perms & self._user_perms)
|
1091 | 1091 | |
|
1092 | 1092 | |
|
class HasRepoGroupPermissionAll(PermsFunction):
    """True when the user holds every required permission on a repo group."""

    def __call__(self, group_name=None, check_location='', user=None):
        self.group_name = group_name
        return super(HasRepoGroupPermissionAll, self).__call__(check_location, user)

    def check_permissions(self):
        try:
            self._user_perms = {self.user_perms['repositories_groups'][self.group_name]}
        except KeyError:
            return False
        return self.required_perms <= self._user_perms
|
1108 | 1108 | |
|
1109 | 1109 | |
|
class HasUserGroupPermissionAny(PermsFunction):
    """True when the user holds any required permission on a user group."""

    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAny, self).__call__(check_location, user)

    def check_permissions(self):
        try:
            self._user_perms = {self.user_perms['user_groups'][self.user_group_name]}
        except KeyError:
            return False
        return bool(self.required_perms & self._user_perms)
|
1125 | 1125 | |
|
1126 | 1126 | |
|
class HasUserGroupPermissionAll(PermsFunction):
    """True when the user holds every required permission on a user group."""

    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAll, self).__call__(check_location, user)

    def check_permissions(self):
        try:
            self._user_perms = {self.user_perms['user_groups'][self.user_group_name]}
        except KeyError:
            return False
        return self.required_perms <= self._user_perms
|
1142 | 1142 | |
|
1143 | 1143 | |
|
1144 | 1144 | #============================================================================== |
|
1145 | 1145 | # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH |
|
1146 | 1146 | #============================================================================== |
|
class HasPermissionAnyMiddleware(object):
    """Repository permission check used by the VCS protocol middleware
    (push/pull); grants access when the user holds any required permission."""

    def __init__(self, *perms):
        self.required_perms = set(perms)

    def __call__(self, user, repo_name):
        # repo_name MUST be unicode, since we handle keys in permission
        # dict by unicode
        repo_name = safe_unicode(repo_name)
        auth_user = AuthUser(user.user_id)
        self.user_perms = set([auth_user.permissions['repositories'][repo_name]])
        self.username = user.username
        self.repo_name = repo_name
        return self.check_permissions()

    def check_permissions(self):
        log.debug('checking VCS protocol '
                  'permissions %s for user:%s repository:%s', self.user_perms,
                  self.username, self.repo_name)
        granted = bool(self.required_perms.intersection(self.user_perms))
        if granted:
            log.debug('Permission to repo: %s granted for user: %s @ %s',
                      self.repo_name, self.username, 'PermissionMiddleware')
        else:
            log.debug('Permission to repo: %s denied for user: %s @ %s',
                      self.repo_name, self.username, 'PermissionMiddleware')
        return granted
|
1172 | 1172 | |
|
1173 | 1173 | |
|
1174 | 1174 | #============================================================================== |
|
1175 | 1175 | # SPECIAL VERSION TO HANDLE API AUTH |
|
1176 | 1176 | #============================================================================== |
|
class _BaseApiPerm(object):
    """Base class for API-call permission checks.

    Subclasses implement :meth:`check_permissions`; an instance is called
    with the user plus optional repo / repo-group scope and returns a bool.
    """

    def __init__(self, *perms):
        self.required_perms = set(perms)

    def __call__(self, check_location=None, user=None, repo_name=None,
                 group_name=None):
        cls_name = self.__class__.__name__
        # build a human-readable description of what is being checked
        scope_parts = ['user:%s' % (user,)]
        if repo_name:
            scope_parts.append('repo:%s' % (repo_name,))
        if group_name:
            scope_parts.append('repo group:%s' % (group_name,))
        check_scope = ', '.join(scope_parts)

        log.debug('checking cls:%s %s %s @ %s',
                  cls_name, self.required_perms, check_scope, check_location)
        if not user:
            log.debug('Empty User passed into arguments')
            return False

        ## process user
        if not isinstance(user, AuthUser):
            user = AuthUser(user.user_id)
        if not check_location:
            check_location = 'unspecified'
        if self.check_permissions(user.permissions, repo_name, group_name):
            log.debug('Permission to %s granted for user: %s @ %s',
                      check_scope, user, check_location)
            return True
        log.debug('Permission to %s denied for user: %s @ %s',
                  check_scope, user, check_location)
        return False

    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
        """
        implement in child class should return True if permissions are ok,
        False otherwise

        :param perm_defs: dict with permission definitions
        :param repo_name: repo name
        """
        raise NotImplementedError()
|
1221 | 1221 | |
|
1222 | 1222 | |
|
class HasPermissionAllApi(_BaseApiPerm):
    """API check: user must hold every required global permission."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
        return self.required_perms.issubset(perm_defs.get('global'))
|
1228 | 1228 | |
|
1229 | 1229 | |
|
class HasPermissionAnyApi(_BaseApiPerm):
    """API check: user must hold at least one required global permission."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
        return bool(self.required_perms.intersection(perm_defs.get('global')))
|
1235 | 1235 | |
|
1236 | 1236 | |
|
class HasRepoPermissionAllApi(_BaseApiPerm):
    """API check: user must hold every required permission on the repo."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
        try:
            granted = perm_defs['repositories'][repo_name]
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return self.required_perms.issubset(set([granted]))
|
1247 | 1247 | |
|
1248 | 1248 | |
|
class HasRepoPermissionAnyApi(_BaseApiPerm):
    """API check: user must hold at least one required permission on the repo."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
        try:
            granted = perm_defs['repositories'][repo_name]
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms.intersection(set([granted])))
|
1259 | 1259 | |
|
1260 | 1260 | |
|
class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
    """API check: user must hold at least one required permission on the
    repository group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
        try:
            granted = perm_defs['repositories_groups'][group_name]
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms.intersection(set([granted])))
|
1271 | 1271 | |
|
class HasRepoGroupPermissionAllApi(_BaseApiPerm):
    """API check: user must hold every required permission on the
    repository group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None):
        try:
            granted = perm_defs['repositories_groups'][group_name]
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return self.required_perms.issubset(set([granted]))
|
1282 | 1282 | |
|
def check_ip_access(source_ip, allowed_ips=None):
    """
    Checks if source_ip is a subnet of any of allowed_ips.

    :param source_ip: IP address of the client, as a string
    :param allowed_ips: list of allowed ips together with mask
        (CIDR "ip/mask" notation)
    :returns: True if source_ip is inside one of allowed_ips,
        a falsy value otherwise
    """
    from kallithea.lib import ipaddr
    log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
    if isinstance(allowed_ips, (tuple, list, set)):
        # parse the client address once instead of re-parsing it for every
        # candidate network in the loop
        source_addr = ipaddr.IPAddress(source_ip)
        for ip in allowed_ips:
            network = ipaddr.IPNetwork(ip)
            if source_addr in network:
                log.debug('IP %s is network %s', source_addr, network)
                return True
        return False
    # NOTE(review): allowed_ips was not a collection; denying (fail closed).
    # Some upstream variants treat this case as "no restriction configured"
    # and allow instead -- confirm the intended default with callers, which
    # appear to always pass a list.
    return False
@@ -1,431 +1,431 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | Authentication modules |
|
16 | 16 | """ |
|
17 | 17 | |
|
18 | 18 | import logging |
|
19 | 19 | import traceback |
|
20 | 20 | |
|
21 | 21 | from kallithea import EXTERN_TYPE_INTERNAL |
|
22 | 22 | from kallithea.lib.compat import importlib |
|
23 | 23 | from kallithea.lib.utils2 import str2bool |
|
24 | 24 | from kallithea.lib.compat import formatted_json, hybrid_property |
|
25 | 25 | from kallithea.lib.auth import PasswordGenerator |
|
26 | 26 | from kallithea.model.user import UserModel |
|
27 | 27 | from kallithea.model.db import Setting, User |
|
28 | 28 | from kallithea.model.meta import Session |
|
29 | 29 | from kallithea.model.user_group import UserGroupModel |
|
30 | 30 | |
|
31 | 31 | log = logging.getLogger(__name__) |
|
32 | 32 | |
|
33 | 33 | |
|
class LazyFormencode(object):
    """Defer creation of a formencode validator until it is actually invoked.

    Stores the validator (or a factory function returning one) together with
    the arguments it should eventually be instantiated with.
    """

    def __init__(self, formencode_obj, *args, **kwargs):
        self.formencode_obj = formencode_obj
        self.args = args
        self.kwargs = kwargs

    def __call__(self, *args, **kwargs):
        from inspect import isfunction
        target = self.formencode_obj
        if isfunction(target):
            #case we wrap validators into functions: unwrap first
            target = self.formencode_obj(*args, **kwargs)
        return target(*self.args, **self.kwargs)
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class KallitheaAuthPluginBase(object): |
|
50 | 50 | auth_func_attrs = { |
|
51 | 51 | "username": "unique username", |
|
52 | 52 | "firstname": "first name", |
|
53 | 53 | "lastname": "last name", |
|
54 | 54 | "email": "email address", |
|
55 | 55 | "groups": '["list", "of", "groups"]', |
|
56 | 56 | "extern_name": "name in external source of record", |
|
57 | 57 | "admin": 'True|False defines if user should be Kallithea admin', |
|
58 | 58 | "active": 'True|False defines active state of user in Kallithea', |
|
59 | 59 | "active_from_extern": "True|False|None, active state from the external auth, " |
|
60 | 60 | "None means use value from the auth plugin" |
|
61 | 61 | } |
|
62 | 62 | |
|
63 | 63 | @property |
|
64 | 64 | def validators(self): |
|
65 | 65 | """ |
|
66 | 66 | Exposes Kallithea validators modules |
|
67 | 67 | """ |
|
68 | 68 | # this is a hack to overcome issues with pylons threadlocals and |
|
69 | 69 | # translator object _() not being registered properly. |
|
70 | 70 | class LazyCaller(object): |
|
71 | 71 | def __init__(self, name): |
|
72 | 72 | self.validator_name = name |
|
73 | 73 | |
|
74 | 74 | def __call__(self, *args, **kwargs): |
|
75 | 75 | from kallithea.model import validators as v |
|
76 | 76 | obj = getattr(v, self.validator_name) |
|
77 |
#log.debug('Initializing lazy formencode object: %s' |
|
|
77 | #log.debug('Initializing lazy formencode object: %s', obj) | |
|
78 | 78 | return LazyFormencode(obj, *args, **kwargs) |
|
79 | 79 | |
|
80 | 80 | |
|
81 | 81 | class ProxyGet(object): |
|
82 | 82 | def __getattribute__(self, name): |
|
83 | 83 | return LazyCaller(name) |
|
84 | 84 | |
|
85 | 85 | return ProxyGet() |
|
86 | 86 | |
|
87 | 87 | @hybrid_property |
|
88 | 88 | def name(self): |
|
89 | 89 | """ |
|
90 | 90 | Returns the name of this authentication plugin. |
|
91 | 91 | |
|
92 | 92 | :returns: string |
|
93 | 93 | """ |
|
94 | 94 | raise NotImplementedError("Not implemented in base class") |
|
95 | 95 | |
|
96 | 96 | @hybrid_property |
|
97 | 97 | def is_container_auth(self): |
|
98 | 98 | """ |
|
99 | 99 | Returns bool if this module uses container auth. |
|
100 | 100 | |
|
101 | 101 | This property will trigger an automatic call to authenticate on |
|
102 | 102 | a visit to the website or during a push/pull. |
|
103 | 103 | |
|
104 | 104 | :returns: bool |
|
105 | 105 | """ |
|
106 | 106 | return False |
|
107 | 107 | |
|
108 | 108 | def accepts(self, user, accepts_empty=True): |
|
109 | 109 | """ |
|
110 | 110 | Checks if this authentication module should accept a request for |
|
111 | 111 | the current user. |
|
112 | 112 | |
|
113 | 113 | :param user: user object fetched using plugin's get_user() method. |
|
114 | 114 | :param accepts_empty: if True accepts don't allow the user to be empty |
|
115 | 115 | :returns: boolean |
|
116 | 116 | """ |
|
117 | 117 | plugin_name = self.name |
|
118 | 118 | if not user and not accepts_empty: |
|
119 | 119 | log.debug('User is empty not allowed to authenticate') |
|
120 | 120 | return False |
|
121 | 121 | |
|
122 | 122 | if user and user.extern_type and user.extern_type != plugin_name: |
|
123 | log.debug('User %s should authenticate using %s this is %s, skipping' | |
|
124 |
|
|
|
123 | log.debug('User %s should authenticate using %s this is %s, skipping', | |
|
124 | user, user.extern_type, plugin_name) | |
|
125 | 125 | |
|
126 | 126 | return False |
|
127 | 127 | return True |
|
128 | 128 | |
|
129 | 129 | def get_user(self, username=None, **kwargs): |
|
130 | 130 | """ |
|
131 | 131 | Helper method for user fetching in plugins, by default it's using |
|
132 | 132 | simple fetch by username, but this method can be customized in plugins |
|
133 | 133 | eg. container auth plugin to fetch user by environ params |
|
134 | 134 | |
|
135 | 135 | :param username: username if given to fetch from database |
|
136 | 136 | :param kwargs: extra arguments needed for user fetching. |
|
137 | 137 | """ |
|
138 | 138 | user = None |
|
139 | log.debug('Trying to fetch user `%s` from Kallithea database' | |
|
140 |
|
|
|
139 | log.debug('Trying to fetch user `%s` from Kallithea database', | |
|
140 | username) | |
|
141 | 141 | if username: |
|
142 | 142 | user = User.get_by_username(username) |
|
143 | 143 | if not user: |
|
144 | 144 | log.debug('Fallback to fetch user in case insensitive mode') |
|
145 | 145 | user = User.get_by_username(username, case_insensitive=True) |
|
146 | 146 | else: |
|
147 |
log.debug('provided username:`%s` is empty skipping...' |
|
|
147 | log.debug('provided username:`%s` is empty skipping...', username) | |
|
148 | 148 | return user |
|
149 | 149 | |
|
150 | 150 | def settings(self): |
|
151 | 151 | """ |
|
152 | 152 | Return a list of the form: |
|
153 | 153 | [ |
|
154 | 154 | { |
|
155 | 155 | "name": "OPTION_NAME", |
|
156 | 156 | "type": "[bool|password|string|int|select]", |
|
157 | 157 | ["values": ["opt1", "opt2", ...]] |
|
158 | 158 | "validator": "expr" |
|
159 | 159 | "description": "A short description of the option" [, |
|
160 | 160 | "default": Default Value], |
|
161 | 161 | ["formname": "Friendly Name for Forms"] |
|
162 | 162 | } [, ...] |
|
163 | 163 | ] |
|
164 | 164 | |
|
165 | 165 | This is used to interrogate the authentication plugin as to what |
|
166 | 166 | settings it expects to be present and configured. |
|
167 | 167 | |
|
168 | 168 | 'type' is a shorthand notation for what kind of value this option is. |
|
169 | 169 | This is primarily used by the auth web form to control how the option |
|
170 | 170 | is configured. |
|
171 | 171 | bool : checkbox |
|
172 | 172 | password : password input box |
|
173 | 173 | string : input box |
|
174 | 174 | select : single select dropdown |
|
175 | 175 | |
|
176 | 176 | 'validator' is an lazy instantiated form field validator object, ala |
|
177 | 177 | formencode. You need to *call* this object to init the validators. |
|
178 | 178 | All calls to Kallithea validators should be used through self.validators |
|
179 | 179 | which is a lazy loading proxy of formencode module. |
|
180 | 180 | """ |
|
181 | 181 | raise NotImplementedError("Not implemented in base class") |
|
182 | 182 | |
|
183 | 183 | def plugin_settings(self): |
|
184 | 184 | """ |
|
185 | 185 | This method is called by the authentication framework, not the .settings() |
|
186 | 186 | method. This method adds a few default settings (e.g., "enabled"), so that |
|
187 | 187 | plugin authors don't have to maintain a bunch of boilerplate. |
|
188 | 188 | |
|
189 | 189 | OVERRIDING THIS METHOD WILL CAUSE YOUR PLUGIN TO FAIL. |
|
190 | 190 | """ |
|
191 | 191 | |
|
192 | 192 | rcsettings = self.settings() |
|
193 | 193 | rcsettings.insert(0, { |
|
194 | 194 | "name": "enabled", |
|
195 | 195 | "validator": self.validators.StringBoolean(if_missing=False), |
|
196 | 196 | "type": "bool", |
|
197 | 197 | "description": "Enable or Disable this Authentication Plugin", |
|
198 | 198 | "formname": "Enabled" |
|
199 | 199 | } |
|
200 | 200 | ) |
|
201 | 201 | return rcsettings |
|
202 | 202 | |
|
203 | 203 | def user_activation_state(self): |
|
204 | 204 | """ |
|
205 | 205 | Defines user activation state when creating new users |
|
206 | 206 | |
|
207 | 207 | :returns: boolean |
|
208 | 208 | """ |
|
209 | 209 | raise NotImplementedError("Not implemented in base class") |
|
210 | 210 | |
|
211 | 211 | def auth(self, userobj, username, passwd, settings, **kwargs): |
|
212 | 212 | """ |
|
213 | 213 | Given a user object (which may be None), username, a plaintext password, |
|
214 | 214 | and a settings object (containing all the keys needed as listed in settings()), |
|
215 | 215 | authenticate this user's login attempt. |
|
216 | 216 | |
|
217 | 217 | Return None on failure. On success, return a dictionary with keys from |
|
218 | 218 | KallitheaAuthPluginBase.auth_func_attrs. |
|
219 | 219 | |
|
220 | 220 | This is later validated for correctness. |
|
221 | 221 | """ |
|
222 | 222 | raise NotImplementedError("not implemented in base class") |
|
223 | 223 | |
|
224 | 224 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): |
|
225 | 225 | """ |
|
226 | 226 | Wrapper to call self.auth() that validates call on it |
|
227 | 227 | |
|
228 | 228 | :param userobj: userobj |
|
229 | 229 | :param username: username |
|
230 | 230 | :param passwd: plaintext password |
|
231 | 231 | :param settings: plugin settings |
|
232 | 232 | """ |
|
233 | 233 | user_data = self.auth(userobj, username, passwd, settings, **kwargs) |
|
234 | 234 | if user_data is not None: |
|
235 | 235 | return self._validate_auth_return(user_data) |
|
236 | 236 | return None |
|
237 | 237 | |
|
238 | 238 | def _validate_auth_return(self, user_data): |
|
239 | 239 | if not isinstance(user_data, dict): |
|
240 | 240 | raise Exception('returned value from auth must be a dict') |
|
241 | 241 | for k in self.auth_func_attrs: |
|
242 | 242 | if k not in user_data: |
|
243 | 243 | raise Exception('Missing %s attribute from returned data' % k) |
|
244 | 244 | return user_data |
|
245 | 245 | |
|
246 | 246 | |
|
247 | 247 | class KallitheaExternalAuthPlugin(KallitheaAuthPluginBase): |
|
248 | 248 | def use_fake_password(self): |
|
249 | 249 | """ |
|
250 | 250 | Return a boolean that indicates whether or not we should set the user's |
|
251 | 251 | password to a random value when it is authenticated by this plugin. |
|
252 | 252 | If your plugin provides authentication, then you will generally want this. |
|
253 | 253 | |
|
254 | 254 | :returns: boolean |
|
255 | 255 | """ |
|
256 | 256 | raise NotImplementedError("Not implemented in base class") |
|
257 | 257 | |
|
258 | 258 | def _authenticate(self, userobj, username, passwd, settings, **kwargs): |
|
259 | 259 | user_data = super(KallitheaExternalAuthPlugin, self)._authenticate( |
|
260 | 260 | userobj, username, passwd, settings, **kwargs) |
|
261 | 261 | if user_data is not None: |
|
262 | 262 | # maybe plugin will clean the username ? |
|
263 | 263 | # we should use the return value |
|
264 | 264 | username = user_data['username'] |
|
265 | 265 | # if user is not active from our extern type we should fail to auth |
|
266 | 266 | # this can prevent from creating users in Kallithea when using |
|
267 | 267 | # external authentication, but if it's inactive user we shouldn't |
|
268 | 268 | # create that user anyway |
|
269 | 269 | if user_data['active_from_extern'] is False: |
|
270 | log.warning("User %s authenticated against %s, but is inactive" | |
|
271 |
|
|
|
270 | log.warning("User %s authenticated against %s, but is inactive", | |
|
271 | username, self.__module__) | |
|
272 | 272 | return None |
|
273 | 273 | |
|
274 | 274 | if self.use_fake_password(): |
|
275 | 275 | # Randomize the PW because we don't need it, but don't want |
|
276 | 276 | # them blank either |
|
277 | 277 | passwd = PasswordGenerator().gen_password(length=8) |
|
278 | 278 | |
|
279 | log.debug('Updating or creating user info from %s plugin' | |
|
280 |
|
|
|
279 | log.debug('Updating or creating user info from %s plugin', | |
|
280 | self.name) | |
|
281 | 281 | user = UserModel().create_or_update( |
|
282 | 282 | username=username, |
|
283 | 283 | password=passwd, |
|
284 | 284 | email=user_data["email"], |
|
285 | 285 | firstname=user_data["firstname"], |
|
286 | 286 | lastname=user_data["lastname"], |
|
287 | 287 | active=user_data["active"], |
|
288 | 288 | admin=user_data["admin"], |
|
289 | 289 | extern_name=user_data["extern_name"], |
|
290 | 290 | extern_type=self.name |
|
291 | 291 | ) |
|
292 | 292 | Session().flush() |
|
293 | 293 | # enforce user is just in given groups, all of them has to be ones |
|
294 | 294 | # created from plugins. We store this info in _group_data JSON field |
|
295 | 295 | groups = user_data['groups'] or [] |
|
296 | 296 | UserGroupModel().enforce_groups(user, groups, self.name) |
|
297 | 297 | Session().commit() |
|
298 | 298 | return user_data |
|
299 | 299 | |
|
300 | 300 | |
|
301 | 301 | def importplugin(plugin): |
|
302 | 302 | """ |
|
303 | 303 | Imports and returns the authentication plugin in the module named by plugin |
|
304 | 304 | (e.g., plugin='kallithea.lib.auth_modules.auth_internal'). Returns the |
|
305 | 305 | KallitheaAuthPluginBase subclass on success, raises exceptions on failure. |
|
306 | 306 | |
|
307 | 307 | raises: |
|
308 | 308 | AttributeError -- no KallitheaAuthPlugin class in the module |
|
309 | 309 | TypeError -- if the KallitheaAuthPlugin is not a subclass of ours KallitheaAuthPluginBase |
|
310 | 310 | ImportError -- if we couldn't import the plugin at all |
|
311 | 311 | """ |
|
312 |
log.debug("Importing %s" |
|
|
312 | log.debug("Importing %s", plugin) | |
|
313 | 313 | if not plugin.startswith(u'kallithea.lib.auth_modules.auth_'): |
|
314 | 314 | parts = plugin.split(u'.lib.auth_modules.auth_', 1) |
|
315 | 315 | if len(parts) == 2: |
|
316 | 316 | _module, pn = parts |
|
317 | 317 | if pn == EXTERN_TYPE_INTERNAL: |
|
318 | 318 | pn = "internal" |
|
319 | 319 | plugin = u'kallithea.lib.auth_modules.auth_' + pn |
|
320 | 320 | PLUGIN_CLASS_NAME = "KallitheaAuthPlugin" |
|
321 | 321 | try: |
|
322 | 322 | module = importlib.import_module(plugin) |
|
323 | 323 | except (ImportError, TypeError): |
|
324 | 324 | log.error(traceback.format_exc()) |
|
325 | 325 | # TODO: make this more error prone, if by some accident we screw up |
|
326 | 326 | # the plugin name, the crash is pretty bad and hard to recover |
|
327 | 327 | raise |
|
328 | 328 | |
|
329 | log.debug("Loaded auth plugin from %s (module:%s, file:%s)" | |
|
330 |
|
|
|
329 | log.debug("Loaded auth plugin from %s (module:%s, file:%s)", | |
|
330 | plugin, module.__name__, module.__file__) | |
|
331 | 331 | |
|
332 | 332 | pluginclass = getattr(module, PLUGIN_CLASS_NAME) |
|
333 | 333 | if not issubclass(pluginclass, KallitheaAuthPluginBase): |
|
334 | 334 | raise TypeError("Authentication class %s.KallitheaAuthPlugin is not " |
|
335 | 335 | "a subclass of %s" % (plugin, KallitheaAuthPluginBase)) |
|
336 | 336 | return pluginclass |
|
337 | 337 | |
|
338 | 338 | |
|
339 | 339 | def loadplugin(plugin): |
|
340 | 340 | """ |
|
341 | 341 | Loads and returns an instantiated authentication plugin. |
|
342 | 342 | |
|
343 | 343 | see: importplugin |
|
344 | 344 | """ |
|
345 | 345 | plugin = importplugin(plugin)() |
|
346 | 346 | if plugin.plugin_settings.im_func != KallitheaAuthPluginBase.plugin_settings.im_func: |
|
347 | 347 | raise TypeError("Authentication class %s.KallitheaAuthPluginBase " |
|
348 | 348 | "has overridden the plugin_settings method, which is " |
|
349 | 349 | "forbidden." % plugin) |
|
350 | 350 | return plugin |
|
351 | 351 | |
|
352 | 352 | |
|
353 | 353 | def authenticate(username, password, environ=None): |
|
354 | 354 | """ |
|
355 | 355 | Authentication function used for access control, |
|
356 | 356 | It tries to authenticate based on enabled authentication modules. |
|
357 | 357 | |
|
358 | 358 | :param username: username can be empty for container auth |
|
359 | 359 | :param password: password can be empty for container auth |
|
360 | 360 | :param environ: environ headers passed for container auth |
|
361 | 361 | :returns: None if auth failed, user_data dict if auth is correct |
|
362 | 362 | """ |
|
363 | 363 | |
|
364 | 364 | auth_plugins = Setting.get_auth_plugins() |
|
365 |
log.debug('Authentication against %s plugins' |
|
|
365 | log.debug('Authentication against %s plugins', auth_plugins) | |
|
366 | 366 | for module in auth_plugins: |
|
367 | 367 | try: |
|
368 | 368 | plugin = loadplugin(module) |
|
369 | 369 | except (ImportError, AttributeError, TypeError) as e: |
|
370 | 370 | raise ImportError('Failed to load authentication module %s : %s' |
|
371 | 371 | % (module, str(e))) |
|
372 |
log.debug('Trying authentication using ** %s **' |
|
|
372 | log.debug('Trying authentication using ** %s **', module) | |
|
373 | 373 | # load plugin settings from Kallithea database |
|
374 | 374 | plugin_name = plugin.name |
|
375 | 375 | plugin_settings = {} |
|
376 | 376 | for v in plugin.plugin_settings(): |
|
377 | 377 | conf_key = "auth_%s_%s" % (plugin_name, v["name"]) |
|
378 | 378 | setting = Setting.get_by_name(conf_key) |
|
379 | 379 | plugin_settings[v["name"]] = setting.app_settings_value if setting else None |
|
380 |
log.debug('Plugin settings \n%s' |
|
|
380 | log.debug('Plugin settings \n%s', formatted_json(plugin_settings)) | |
|
381 | 381 | |
|
382 | 382 | if not str2bool(plugin_settings["enabled"]): |
|
383 | log.info("Authentication plugin %s is disabled, skipping for %s" | |
|
384 |
|
|
|
383 | log.info("Authentication plugin %s is disabled, skipping for %s", | |
|
384 | module, username) | |
|
385 | 385 | continue |
|
386 | 386 | |
|
387 | 387 | # use plugin's method of user extraction. |
|
388 | 388 | user = plugin.get_user(username, environ=environ, |
|
389 | 389 | settings=plugin_settings) |
|
390 |
log.debug('Plugin %s extracted user is `%s`' |
|
|
390 | log.debug('Plugin %s extracted user is `%s`', module, user) | |
|
391 | 391 | if not plugin.accepts(user): |
|
392 | log.debug('Plugin %s does not accept user `%s` for authentication' | |
|
393 |
|
|
|
392 | log.debug('Plugin %s does not accept user `%s` for authentication', | |
|
393 | module, user) | |
|
394 | 394 | continue |
|
395 | 395 | else: |
|
396 | log.debug('Plugin %s accepted user `%s` for authentication' | |
|
397 |
|
|
|
396 | log.debug('Plugin %s accepted user `%s` for authentication', | |
|
397 | module, user) | |
|
398 | 398 | |
|
399 |
log.info('Authenticating user using %s plugin' |
|
|
399 | log.info('Authenticating user using %s plugin', plugin.__module__) | |
|
400 | 400 | # _authenticate is a wrapper for .auth() method of plugin. |
|
401 | 401 | # it checks if .auth() sends proper data. For KallitheaExternalAuthPlugin |
|
402 | 402 | # it also maps users to Database and maps the attributes returned |
|
403 | 403 | # from .auth() to Kallithea database. If this function returns data |
|
404 | 404 | # then auth is correct. |
|
405 | 405 | user_data = plugin._authenticate(user, username, password, |
|
406 | 406 | plugin_settings, |
|
407 | 407 | environ=environ or {}) |
|
408 |
log.debug('PLUGIN USER DATA: %s' |
|
|
408 | log.debug('PLUGIN USER DATA: %s', user_data) | |
|
409 | 409 | |
|
410 | 410 | if user_data is not None: |
|
411 | 411 | log.debug('Plugin returned proper authentication data') |
|
412 | 412 | return user_data |
|
413 | 413 | |
|
414 | 414 | # we failed to Auth because .auth() method didn't return the user |
|
415 | 415 | if username: |
|
416 | log.warning("User `%s` failed to authenticate against %s" | |
|
417 |
|
|
|
416 | log.warning("User `%s` failed to authenticate against %s", | |
|
417 | username, plugin.__module__) | |
|
418 | 418 | return None |
|
419 | 419 | |
|
420 | 420 | def get_managed_fields(user): |
|
421 | 421 | """return list of fields that are managed by the user's auth source, usually some of |
|
422 | 422 | 'username', 'firstname', 'lastname', 'email', 'active', 'password' |
|
423 | 423 | """ |
|
424 | 424 | auth_plugins = Setting.get_auth_plugins() |
|
425 | 425 | for module in auth_plugins: |
|
426 | 426 | log.debug('testing %s (%s) with auth plugin %s', user, user.extern_type, module) |
|
427 | 427 | plugin = loadplugin(module) |
|
428 | 428 | if plugin.name == user.extern_type: |
|
429 | 429 | return plugin.get_managed_fields() |
|
430 | 430 | log.error('no auth plugin %s found for %s', user.extern_type, user) |
|
431 | 431 | return [] # TODO: Fail badly instead of allowing everything to be edited? |
@@ -1,195 +1,195 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.auth_modules.auth_container |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Kallithea container based authentication plugin |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Created on Nov 17, 2012 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import logging |
|
29 | 29 | from kallithea.lib import auth_modules |
|
30 | 30 | from kallithea.lib.utils2 import str2bool, safe_unicode |
|
31 | 31 | from kallithea.lib.compat import hybrid_property |
|
32 | 32 | from kallithea.model.db import User |
|
33 | 33 | |
|
34 | 34 | log = logging.getLogger(__name__) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | class KallitheaAuthPlugin(auth_modules.KallitheaExternalAuthPlugin): |
|
38 | 38 | def __init__(self): |
|
39 | 39 | pass |
|
40 | 40 | |
|
41 | 41 | @hybrid_property |
|
42 | 42 | def name(self): |
|
43 | 43 | return "container" |
|
44 | 44 | |
|
45 | 45 | @hybrid_property |
|
46 | 46 | def is_container_auth(self): |
|
47 | 47 | return True |
|
48 | 48 | |
|
49 | 49 | def settings(self): |
|
50 | 50 | |
|
51 | 51 | settings = [ |
|
52 | 52 | { |
|
53 | 53 | "name": "header", |
|
54 | 54 | "validator": self.validators.UnicodeString(strip=True, not_empty=True), |
|
55 | 55 | "type": "string", |
|
56 | 56 | "description": "Header to extract the user from", |
|
57 | 57 | "default": "REMOTE_USER", |
|
58 | 58 | "formname": "Header" |
|
59 | 59 | }, |
|
60 | 60 | { |
|
61 | 61 | "name": "fallback_header", |
|
62 | 62 | "validator": self.validators.UnicodeString(strip=True), |
|
63 | 63 | "type": "string", |
|
64 | 64 | "description": "Header to extract the user from when main one fails", |
|
65 | 65 | "default": "HTTP_X_FORWARDED_USER", |
|
66 | 66 | "formname": "Fallback header" |
|
67 | 67 | }, |
|
68 | 68 | { |
|
69 | 69 | "name": "clean_username", |
|
70 | 70 | "validator": self.validators.StringBoolean(if_missing=False), |
|
71 | 71 | "type": "bool", |
|
72 | 72 | "description": "Perform cleaning of user, if passed user has @ in username " |
|
73 | 73 | "then first part before @ is taken. " |
|
74 | 74 | "If there's \\ in the username only the part after \\ is taken", |
|
75 | 75 | "default": "True", |
|
76 | 76 | "formname": "Clean username" |
|
77 | 77 | }, |
|
78 | 78 | ] |
|
79 | 79 | return settings |
|
80 | 80 | |
|
81 | 81 | def use_fake_password(self): |
|
82 | 82 | return True |
|
83 | 83 | |
|
84 | 84 | def user_activation_state(self): |
|
85 | 85 | def_user_perms = User.get_default_user().AuthUser.permissions['global'] |
|
86 | 86 | return 'hg.extern_activate.auto' in def_user_perms |
|
87 | 87 | |
|
88 | 88 | def _clean_username(self, username): |
|
89 | 89 | # Removing realm and domain from username |
|
90 | 90 | username = username.partition('@')[0] |
|
91 | 91 | username = username.rpartition('\\')[2] |
|
92 | 92 | return username |
|
93 | 93 | |
|
94 | 94 | def _get_username(self, environ, settings): |
|
95 | 95 | username = None |
|
96 | 96 | environ = environ or {} |
|
97 | 97 | if not environ: |
|
98 |
log.debug('got empty environ: %s' |
|
|
98 | log.debug('got empty environ: %s', environ) | |
|
99 | 99 | |
|
100 | 100 | settings = settings or {} |
|
101 | 101 | if settings.get('header'): |
|
102 | 102 | header = settings.get('header') |
|
103 | 103 | username = environ.get(header) |
|
104 |
log.debug('extracted %s:%s' |
|
|
104 | log.debug('extracted %s:%s', header, username) | |
|
105 | 105 | |
|
106 | 106 | # fallback mode |
|
107 | 107 | if not username and settings.get('fallback_header'): |
|
108 | 108 | header = settings.get('fallback_header') |
|
109 | 109 | username = environ.get(header) |
|
110 |
log.debug('extracted %s:%s' |
|
|
110 | log.debug('extracted %s:%s', header, username) | |
|
111 | 111 | |
|
112 | 112 | if username and str2bool(settings.get('clean_username')): |
|
113 |
log.debug('Received username %s from container' |
|
|
113 | log.debug('Received username %s from container', username) | |
|
114 | 114 | username = self._clean_username(username) |
|
115 |
log.debug('New cleanup user is: %s' |
|
|
115 | log.debug('New cleanup user is: %s', username) | |
|
116 | 116 | return username |
|
117 | 117 | |
|
118 | 118 | def get_user(self, username=None, **kwargs): |
|
119 | 119 | """ |
|
120 | 120 | Helper method for user fetching in plugins, by default it's using |
|
121 | 121 | simple fetch by username, but this method can be customized in plugins |
|
122 | 122 | eg. container auth plugin to fetch user by environ params |
|
123 | 123 | :param username: username if given to fetch |
|
124 | 124 | :param kwargs: extra arguments needed for user fetching. |
|
125 | 125 | """ |
|
126 | 126 | environ = kwargs.get('environ') or {} |
|
127 | 127 | settings = kwargs.get('settings') or {} |
|
128 | 128 | username = self._get_username(environ, settings) |
|
129 | 129 | # we got the username, so use default method now |
|
130 | 130 | return super(KallitheaAuthPlugin, self).get_user(username) |
|
131 | 131 | |
|
132 | 132 | def auth(self, userobj, username, password, settings, **kwargs): |
|
133 | 133 | """ |
|
134 | 134 | Gets the container_auth username (or email). It tries to get username |
|
135 | 135 | from REMOTE_USER if this plugin is enabled, if that fails |
|
136 | 136 | it tries to get username from HTTP_X_FORWARDED_USER if fallback header |
|
137 | 137 | is set. clean_username extracts the username from this data if it's |
|
138 | 138 | having @ in it. |
|
139 | 139 | Return None on failure. On success, return a dictionary of the form: |
|
140 | 140 | |
|
141 | 141 | see: KallitheaAuthPluginBase.auth_func_attrs |
|
142 | 142 | |
|
143 | 143 | :param userobj: |
|
144 | 144 | :param username: |
|
145 | 145 | :param password: |
|
146 | 146 | :param settings: |
|
147 | 147 | :param kwargs: |
|
148 | 148 | """ |
|
149 | 149 | environ = kwargs.get('environ') |
|
150 | 150 | if not environ: |
|
151 | 151 | log.debug('Empty environ data skipping...') |
|
152 | 152 | return None |
|
153 | 153 | |
|
154 | 154 | if not userobj: |
|
155 | 155 | userobj = self.get_user('', environ=environ, settings=settings) |
|
156 | 156 | |
|
157 | 157 | # we don't care passed username/password for container auth plugins. |
|
158 | 158 | # only way to log in is using environ |
|
159 | 159 | username = None |
|
160 | 160 | if userobj: |
|
161 | 161 | username = getattr(userobj, 'username') |
|
162 | 162 | |
|
163 | 163 | if not username: |
|
164 | 164 | # we don't have any objects in DB, user doesn't exist, extract |
|
165 | 165 | # username from environ based on the settings |
|
166 | 166 | username = self._get_username(environ, settings) |
|
167 | 167 | |
|
168 | 168 | # if cannot fetch username, it's a no-go for this plugin to proceed |
|
169 | 169 | if not username: |
|
170 | 170 | return None |
|
171 | 171 | |
|
172 | 172 | # old attrs fetched from Kallithea database |
|
173 | 173 | admin = getattr(userobj, 'admin', False) |
|
174 | 174 | active = getattr(userobj, 'active', True) |
|
175 | 175 | email = getattr(userobj, 'email', '') |
|
176 | 176 | firstname = getattr(userobj, 'firstname', '') |
|
177 | 177 | lastname = getattr(userobj, 'lastname', '') |
|
178 | 178 | |
|
179 | 179 | user_data = { |
|
180 | 180 | 'username': username, |
|
181 | 181 | 'firstname': safe_unicode(firstname or username), |
|
182 | 182 | 'lastname': safe_unicode(lastname or ''), |
|
183 | 183 | 'groups': [], |
|
184 | 184 | 'email': email or '', |
|
185 | 185 | 'admin': admin or False, |
|
186 | 186 | 'active': active, |
|
187 | 187 | 'active_from_extern': True, |
|
188 | 188 | 'extern_name': username, |
|
189 | 189 | } |
|
190 | 190 | |
|
191 |
log.info('user `%s` authenticated correctly' |
|
|
191 | log.info('user `%s` authenticated correctly', user_data['username']) | |
|
192 | 192 | return user_data |
|
193 | 193 | |
|
194 | 194 | def get_managed_fields(self): |
|
195 | 195 | return ['username', 'password'] |
@@ -1,245 +1,245 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.auth_modules.auth_crowd |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Kallithea authentication plugin for Atlassian CROWD |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Created on Nov 17, 2012 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | import base64 |
|
30 | 30 | import logging |
|
31 | 31 | import urllib2 |
|
32 | 32 | from kallithea.lib import auth_modules |
|
33 | 33 | from kallithea.lib.compat import json, formatted_json, hybrid_property |
|
34 | 34 | from kallithea.model.db import User |
|
35 | 35 | |
|
36 | 36 | log = logging.getLogger(__name__) |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | class CrowdServer(object): |
|
40 | 40 | def __init__(self, *args, **kwargs): |
|
41 | 41 | """ |
|
42 | 42 | Create a new CrowdServer object that points to IP/Address 'host', |
|
43 | 43 | on the given port, and using the given method (https/http). user and |
|
44 | 44 | passwd can be set here or with set_credentials. If unspecified, |
|
45 | 45 | "version" defaults to "latest". |
|
46 | 46 | |
|
47 | 47 | example:: |
|
48 | 48 | |
|
49 | 49 | cserver = CrowdServer(host="127.0.0.1", |
|
50 | 50 | port="8095", |
|
51 | 51 | user="some_app", |
|
52 | 52 | passwd="some_passwd", |
|
53 | 53 | version="1") |
|
54 | 54 | """ |
|
55 | 55 | if not "port" in kwargs: |
|
56 | 56 | kwargs["port"] = "8095" |
|
57 | 57 | self._logger = kwargs.get("logger", logging.getLogger(__name__)) |
|
58 | 58 | self._uri = "%s://%s:%s/crowd" % (kwargs.get("method", "http"), |
|
59 | 59 | kwargs.get("host", "127.0.0.1"), |
|
60 | 60 | kwargs.get("port", "8095")) |
|
61 | 61 | self.set_credentials(kwargs.get("user", ""), |
|
62 | 62 | kwargs.get("passwd", "")) |
|
63 | 63 | self._version = kwargs.get("version", "latest") |
|
64 | 64 | self._url_list = None |
|
65 | 65 | self._appname = "crowd" |
|
66 | 66 | |
|
67 | 67 | def set_credentials(self, user, passwd): |
|
68 | 68 | self.user = user |
|
69 | 69 | self.passwd = passwd |
|
70 | 70 | self._make_opener() |
|
71 | 71 | |
|
72 | 72 | def _make_opener(self): |
|
73 | 73 | mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() |
|
74 | 74 | mgr.add_password(None, self._uri, self.user, self.passwd) |
|
75 | 75 | handler = urllib2.HTTPBasicAuthHandler(mgr) |
|
76 | 76 | self.opener = urllib2.build_opener(handler) |
|
77 | 77 | |
|
78 | 78 | def _request(self, url, body=None, headers=None, |
|
79 | 79 | method=None, noformat=False, |
|
80 | 80 | empty_response_ok=False): |
|
81 | 81 | _headers = {"Content-type": "application/json", |
|
82 | 82 | "Accept": "application/json"} |
|
83 | 83 | if self.user and self.passwd: |
|
84 | 84 | authstring = base64.b64encode("%s:%s" % (self.user, self.passwd)) |
|
85 | 85 | _headers["Authorization"] = "Basic %s" % authstring |
|
86 | 86 | if headers: |
|
87 | 87 | _headers.update(headers) |
|
88 | log.debug("Sent crowd: \n%s" | |
|
89 |
|
|
|
90 |
"headers": _headers})) |
|
|
88 | log.debug("Sent crowd: \n%s", | |
|
89 | formatted_json({"url": url, "body": body, | |
|
90 | "headers": _headers})) | |
|
91 | 91 | request = urllib2.Request(url, body, _headers) |
|
92 | 92 | if method: |
|
93 | 93 | request.get_method = lambda: method |
|
94 | 94 | |
|
95 | 95 | global msg |
|
96 | 96 | msg = "" |
|
97 | 97 | try: |
|
98 | 98 | rdoc = self.opener.open(request) |
|
99 | 99 | msg = "".join(rdoc.readlines()) |
|
100 | 100 | if not msg and empty_response_ok: |
|
101 | 101 | rval = {} |
|
102 | 102 | rval["status"] = True |
|
103 | 103 | rval["error"] = "Response body was empty" |
|
104 | 104 | elif not noformat: |
|
105 | 105 | rval = json.loads(msg) |
|
106 | 106 | rval["status"] = True |
|
107 | 107 | else: |
|
108 | 108 | rval = "".join(rdoc.readlines()) |
|
109 | 109 | except Exception as e: |
|
110 | 110 | if not noformat: |
|
111 | 111 | rval = {"status": False, |
|
112 | 112 | "body": body, |
|
113 | 113 | "error": str(e) + "\n" + msg} |
|
114 | 114 | else: |
|
115 | 115 | rval = None |
|
116 | 116 | return rval |
|
117 | 117 | |
|
118 | 118 | def user_auth(self, username, password): |
|
119 | 119 | """Authenticate a user against crowd. Returns brief information about |
|
120 | 120 | the user.""" |
|
121 | 121 | url = ("%s/rest/usermanagement/%s/authentication?username=%s" |
|
122 | 122 | % (self._uri, self._version, username)) |
|
123 | 123 | body = json.dumps({"value": password}) |
|
124 | 124 | return self._request(url, body) |
|
125 | 125 | |
|
126 | 126 | def user_groups(self, username): |
|
127 | 127 | """Retrieve a list of groups to which this user belongs.""" |
|
128 | 128 | url = ("%s/rest/usermanagement/%s/user/group/nested?username=%s" |
|
129 | 129 | % (self._uri, self._version, username)) |
|
130 | 130 | return self._request(url) |
|
131 | 131 | |
|
132 | 132 | |
|
133 | 133 | class KallitheaAuthPlugin(auth_modules.KallitheaExternalAuthPlugin): |
|
134 | 134 | |
|
135 | 135 | @hybrid_property |
|
136 | 136 | def name(self): |
|
137 | 137 | return "crowd" |
|
138 | 138 | |
|
139 | 139 | def settings(self): |
|
140 | 140 | settings = [ |
|
141 | 141 | { |
|
142 | 142 | "name": "host", |
|
143 | 143 | "validator": self.validators.UnicodeString(strip=True), |
|
144 | 144 | "type": "string", |
|
145 | 145 | "description": "The FQDN or IP of the Atlassian CROWD Server", |
|
146 | 146 | "default": "127.0.0.1", |
|
147 | 147 | "formname": "Host" |
|
148 | 148 | }, |
|
149 | 149 | { |
|
150 | 150 | "name": "port", |
|
151 | 151 | "validator": self.validators.Number(strip=True), |
|
152 | 152 | "type": "int", |
|
153 | 153 | "description": "The Port in use by the Atlassian CROWD Server", |
|
154 | 154 | "default": 8095, |
|
155 | 155 | "formname": "Port" |
|
156 | 156 | }, |
|
157 | 157 | { |
|
158 | 158 | "name": "app_name", |
|
159 | 159 | "validator": self.validators.UnicodeString(strip=True), |
|
160 | 160 | "type": "string", |
|
161 | 161 | "description": "The Application Name to authenticate to CROWD", |
|
162 | 162 | "default": "", |
|
163 | 163 | "formname": "Application Name" |
|
164 | 164 | }, |
|
165 | 165 | { |
|
166 | 166 | "name": "app_password", |
|
167 | 167 | "validator": self.validators.UnicodeString(strip=True), |
|
168 | 168 | "type": "string", |
|
169 | 169 | "description": "The password to authenticate to CROWD", |
|
170 | 170 | "default": "", |
|
171 | 171 | "formname": "Application Password" |
|
172 | 172 | }, |
|
173 | 173 | { |
|
174 | 174 | "name": "admin_groups", |
|
175 | 175 | "validator": self.validators.UnicodeString(strip=True), |
|
176 | 176 | "type": "string", |
|
177 | 177 | "description": "A comma separated list of group names that identify users as Kallithea Administrators", |
|
178 | 178 | "formname": "Admin Groups" |
|
179 | 179 | } |
|
180 | 180 | ] |
|
181 | 181 | return settings |
|
182 | 182 | |
|
183 | 183 | def use_fake_password(self): |
|
184 | 184 | return True |
|
185 | 185 | |
|
186 | 186 | def user_activation_state(self): |
|
187 | 187 | def_user_perms = User.get_default_user().AuthUser.permissions['global'] |
|
188 | 188 | return 'hg.extern_activate.auto' in def_user_perms |
|
189 | 189 | |
|
190 | 190 | def auth(self, userobj, username, password, settings, **kwargs): |
|
191 | 191 | """ |
|
192 | 192 | Given a user object (which may be null), username, a plaintext password, |
|
193 | 193 | and a settings object (containing all the keys needed as listed in settings()), |
|
194 | 194 | authenticate this user's login attempt. |
|
195 | 195 | |
|
196 | 196 | Return None on failure. On success, return a dictionary of the form: |
|
197 | 197 | |
|
198 | 198 | see: KallitheaAuthPluginBase.auth_func_attrs |
|
199 | 199 | This is later validated for correctness |
|
200 | 200 | """ |
|
201 | 201 | if not username or not password: |
|
202 | 202 | log.debug('Empty username or password skipping...') |
|
203 | 203 | return None |
|
204 | 204 | |
|
205 |
log.debug("Crowd settings: \n%s" |
|
|
205 | log.debug("Crowd settings: \n%s", formatted_json(settings)) | |
|
206 | 206 | server = CrowdServer(**settings) |
|
207 | 207 | server.set_credentials(settings["app_name"], settings["app_password"]) |
|
208 | 208 | crowd_user = server.user_auth(username, password) |
|
209 |
log.debug("Crowd returned: \n%s" |
|
|
209 | log.debug("Crowd returned: \n%s", formatted_json(crowd_user)) | |
|
210 | 210 | if not crowd_user["status"]: |
|
211 | 211 | return None |
|
212 | 212 | |
|
213 | 213 | res = server.user_groups(crowd_user["name"]) |
|
214 |
log.debug("Crowd groups: \n%s" |
|
|
214 | log.debug("Crowd groups: \n%s", formatted_json(res)) | |
|
215 | 215 | crowd_user["groups"] = [x["name"] for x in res["groups"]] |
|
216 | 216 | |
|
217 | 217 | # old attrs fetched from Kallithea database |
|
218 | 218 | admin = getattr(userobj, 'admin', False) |
|
219 | 219 | active = getattr(userobj, 'active', True) |
|
220 | 220 | email = getattr(userobj, 'email', '') |
|
221 | 221 | firstname = getattr(userobj, 'firstname', '') |
|
222 | 222 | lastname = getattr(userobj, 'lastname', '') |
|
223 | 223 | |
|
224 | 224 | user_data = { |
|
225 | 225 | 'username': username, |
|
226 | 226 | 'firstname': crowd_user["first-name"] or firstname, |
|
227 | 227 | 'lastname': crowd_user["last-name"] or lastname, |
|
228 | 228 | 'groups': crowd_user["groups"], |
|
229 | 229 | 'email': crowd_user["email"] or email, |
|
230 | 230 | 'admin': admin, |
|
231 | 231 | 'active': active, |
|
232 | 232 | 'active_from_extern': crowd_user.get('active'), # ??? |
|
233 | 233 | 'extern_name': crowd_user["name"], |
|
234 | 234 | } |
|
235 | 235 | |
|
236 | 236 | # set an admin if we're in admin_groups of crowd |
|
237 | 237 | for group in settings["admin_groups"].split(","): |
|
238 | 238 | if group in user_data["groups"]: |
|
239 | 239 | user_data["admin"] = True |
|
240 |
log.debug("Final crowd user object: \n%s" |
|
|
241 |
log.info('user %s authenticated correctly' |
|
|
240 | log.debug("Final crowd user object: \n%s", formatted_json(user_data)) | |
|
241 | log.info('user %s authenticated correctly', user_data['username']) | |
|
242 | 242 | return user_data |
|
243 | 243 | |
|
244 | 244 | def get_managed_fields(self): |
|
245 | 245 | return ['username', 'firstname', 'lastname', 'email', 'password'] |
@@ -1,103 +1,103 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.auth_modules.auth_internal |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Kallithea authentication plugin for built in internal auth |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Created on Nov 17, 2012 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | import logging |
|
30 | 30 | |
|
31 | 31 | from kallithea import EXTERN_TYPE_INTERNAL |
|
32 | 32 | from kallithea.lib import auth_modules |
|
33 | 33 | from kallithea.lib.compat import formatted_json, hybrid_property |
|
34 | 34 | from kallithea.model.db import User |
|
35 | 35 | |
|
36 | 36 | log = logging.getLogger(__name__) |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | class KallitheaAuthPlugin(auth_modules.KallitheaAuthPluginBase): |
|
40 | 40 | def __init__(self): |
|
41 | 41 | pass |
|
42 | 42 | |
|
43 | 43 | @hybrid_property |
|
44 | 44 | def name(self): |
|
45 | 45 | return EXTERN_TYPE_INTERNAL |
|
46 | 46 | |
|
47 | 47 | def settings(self): |
|
48 | 48 | return [] |
|
49 | 49 | |
|
50 | 50 | def user_activation_state(self): |
|
51 | 51 | def_user_perms = User.get_default_user().AuthUser.permissions['global'] |
|
52 | 52 | return 'hg.register.auto_activate' in def_user_perms |
|
53 | 53 | |
|
54 | 54 | def accepts(self, user, accepts_empty=True): |
|
55 | 55 | """ |
|
56 | 56 | Custom accepts for this auth that doesn't accept empty users. We |
|
57 | 57 | know that user exists in database. |
|
58 | 58 | """ |
|
59 | 59 | return super(KallitheaAuthPlugin, self).accepts(user, |
|
60 | 60 | accepts_empty=False) |
|
61 | 61 | |
|
62 | 62 | def auth(self, userobj, username, password, settings, **kwargs): |
|
63 | 63 | if not userobj: |
|
64 |
log.debug('userobj was:%s skipping' |
|
|
64 | log.debug('userobj was:%s skipping', userobj) | |
|
65 | 65 | return None |
|
66 | 66 | if userobj.extern_type != self.name: |
|
67 | log.warning("userobj:%s extern_type mismatch got:`%s` expected:`%s`" | |
|
68 |
|
|
|
67 | log.warning("userobj:%s extern_type mismatch got:`%s` expected:`%s`", | |
|
68 | userobj, userobj.extern_type, self.name) | |
|
69 | 69 | return None |
|
70 | 70 | |
|
71 | 71 | user_data = { |
|
72 | 72 | "username": userobj.username, |
|
73 | 73 | "firstname": userobj.firstname, |
|
74 | 74 | "lastname": userobj.lastname, |
|
75 | 75 | "groups": [], |
|
76 | 76 | "email": userobj.email, |
|
77 | 77 | "admin": userobj.admin, |
|
78 | 78 | "active": userobj.active, |
|
79 | 79 | "active_from_extern": userobj.active, |
|
80 | 80 | "extern_name": userobj.user_id, |
|
81 | 81 | } |
|
82 | 82 | |
|
83 | 83 | log.debug(formatted_json(user_data)) |
|
84 | 84 | if userobj.active: |
|
85 | 85 | from kallithea.lib import auth |
|
86 | 86 | password_match = auth.KallitheaCrypto.hash_check(password, userobj.password) |
|
87 | 87 | if userobj.username == User.DEFAULT_USER and userobj.active: |
|
88 |
log.info('user %s authenticated correctly as anonymous user' |
|
|
88 | log.info('user %s authenticated correctly as anonymous user', | |
|
89 | 89 | username) |
|
90 | 90 | return user_data |
|
91 | 91 | |
|
92 | 92 | elif userobj.username == username and password_match: |
|
93 |
log.info('user %s authenticated correctly' |
|
|
93 | log.info('user %s authenticated correctly', user_data['username']) | |
|
94 | 94 | return user_data |
|
95 |
log.error("user %s had a bad password" |
|
|
95 | log.error("user %s had a bad password", username) | |
|
96 | 96 | return None |
|
97 | 97 | else: |
|
98 |
log.warning('user %s tried auth but is disabled' |
|
|
98 | log.warning('user %s tried auth but is disabled', username) | |
|
99 | 99 | return None |
|
100 | 100 | |
|
101 | 101 | def get_managed_fields(self): |
|
102 | 102 | # Note: 'username' should only be editable (at least for user) if self registration is enabled |
|
103 | 103 | return [] |
@@ -1,364 +1,364 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.auth_modules.auth_ldap |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Kallithea authentication plugin for LDAP |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Created on Nov 17, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | import logging |
|
30 | 30 | import traceback |
|
31 | 31 | |
|
32 | 32 | from kallithea.lib import auth_modules |
|
33 | 33 | from kallithea.lib.compat import hybrid_property |
|
34 | 34 | from kallithea.lib.utils2 import safe_unicode, safe_str |
|
35 | 35 | from kallithea.lib.exceptions import ( |
|
36 | 36 | LdapConnectionError, LdapUsernameError, LdapPasswordError, LdapImportError |
|
37 | 37 | ) |
|
38 | 38 | from kallithea.model.db import User |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | try: |
|
43 | 43 | import ldap |
|
44 | 44 | except ImportError: |
|
45 | 45 | # means that python-ldap is not installed |
|
46 | 46 | ldap = None |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | class AuthLdap(object): |
|
50 | 50 | |
|
51 | 51 | def __init__(self, server, base_dn, port=389, bind_dn='', bind_pass='', |
|
52 | 52 | tls_kind='PLAIN', tls_reqcert='DEMAND', ldap_version=3, |
|
53 | 53 | ldap_filter='(&(objectClass=user)(!(objectClass=computer)))', |
|
54 | 54 | search_scope='SUBTREE', attr_login='uid'): |
|
55 | 55 | if ldap is None: |
|
56 | 56 | raise LdapImportError |
|
57 | 57 | |
|
58 | 58 | self.ldap_version = ldap_version |
|
59 | 59 | ldap_server_type = 'ldap' |
|
60 | 60 | |
|
61 | 61 | self.TLS_KIND = tls_kind |
|
62 | 62 | |
|
63 | 63 | if self.TLS_KIND == 'LDAPS': |
|
64 | 64 | port = port or 689 |
|
65 | 65 | ldap_server_type = ldap_server_type + 's' |
|
66 | 66 | |
|
67 | 67 | OPT_X_TLS_DEMAND = 2 |
|
68 | 68 | self.TLS_REQCERT = getattr(ldap, 'OPT_X_TLS_%s' % tls_reqcert, |
|
69 | 69 | OPT_X_TLS_DEMAND) |
|
70 | 70 | # split server into list |
|
71 | 71 | self.LDAP_SERVER_ADDRESS = server.split(',') |
|
72 | 72 | self.LDAP_SERVER_PORT = port |
|
73 | 73 | |
|
74 | 74 | # USE FOR READ ONLY BIND TO LDAP SERVER |
|
75 | 75 | self.LDAP_BIND_DN = safe_str(bind_dn) |
|
76 | 76 | self.LDAP_BIND_PASS = safe_str(bind_pass) |
|
77 | 77 | _LDAP_SERVERS = [] |
|
78 | 78 | for host in self.LDAP_SERVER_ADDRESS: |
|
79 | 79 | _LDAP_SERVERS.append("%s://%s:%s" % (ldap_server_type, |
|
80 | 80 | host.replace(' ', ''), |
|
81 | 81 | self.LDAP_SERVER_PORT)) |
|
82 | 82 | self.LDAP_SERVER = str(', '.join(s for s in _LDAP_SERVERS)) |
|
83 | 83 | self.BASE_DN = safe_str(base_dn) |
|
84 | 84 | self.LDAP_FILTER = safe_str(ldap_filter) |
|
85 | 85 | self.SEARCH_SCOPE = getattr(ldap, 'SCOPE_%s' % search_scope) |
|
86 | 86 | self.attr_login = attr_login |
|
87 | 87 | |
|
88 | 88 | def authenticate_ldap(self, username, password): |
|
89 | 89 | """ |
|
90 | 90 | Authenticate a user via LDAP and return his/her LDAP properties. |
|
91 | 91 | |
|
92 | 92 | Raises AuthenticationError if the credentials are rejected, or |
|
93 | 93 | EnvironmentError if the LDAP server can't be reached. |
|
94 | 94 | |
|
95 | 95 | :param username: username |
|
96 | 96 | :param password: password |
|
97 | 97 | """ |
|
98 | 98 | |
|
99 | 99 | from kallithea.lib.helpers import chop_at |
|
100 | 100 | |
|
101 | 101 | uid = chop_at(username, "@%s" % self.LDAP_SERVER_ADDRESS) |
|
102 | 102 | |
|
103 | 103 | if not password: |
|
104 | 104 | log.debug("Attempt to authenticate LDAP user " |
|
105 | 105 | "with blank password rejected.") |
|
106 | 106 | raise LdapPasswordError() |
|
107 | 107 | if "," in username: |
|
108 | 108 | raise LdapUsernameError("invalid character in username: ,") |
|
109 | 109 | try: |
|
110 | 110 | if hasattr(ldap, 'OPT_X_TLS_CACERTDIR'): |
|
111 | 111 | ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, |
|
112 | 112 | '/etc/openldap/cacerts') |
|
113 | 113 | ldap.set_option(ldap.OPT_REFERRALS, ldap.OPT_OFF) |
|
114 | 114 | ldap.set_option(ldap.OPT_RESTART, ldap.OPT_ON) |
|
115 | 115 | ldap.set_option(ldap.OPT_TIMEOUT, 20) |
|
116 | 116 | ldap.set_option(ldap.OPT_NETWORK_TIMEOUT, 10) |
|
117 | 117 | ldap.set_option(ldap.OPT_TIMELIMIT, 15) |
|
118 | 118 | if self.TLS_KIND != 'PLAIN': |
|
119 | 119 | ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, self.TLS_REQCERT) |
|
120 | 120 | server = ldap.initialize(self.LDAP_SERVER) |
|
121 | 121 | if self.ldap_version == 2: |
|
122 | 122 | server.protocol = ldap.VERSION2 |
|
123 | 123 | else: |
|
124 | 124 | server.protocol = ldap.VERSION3 |
|
125 | 125 | |
|
126 | 126 | if self.TLS_KIND == 'START_TLS': |
|
127 | 127 | server.start_tls_s() |
|
128 | 128 | |
|
129 | 129 | if self.LDAP_BIND_DN and self.LDAP_BIND_PASS: |
|
130 | log.debug('Trying simple_bind with password and given DN: %s' | |
|
131 |
|
|
|
130 | log.debug('Trying simple_bind with password and given DN: %s', | |
|
131 | self.LDAP_BIND_DN) | |
|
132 | 132 | server.simple_bind_s(self.LDAP_BIND_DN, self.LDAP_BIND_PASS) |
|
133 | 133 | |
|
134 | 134 | filter_ = '(&%s(%s=%s))' % (self.LDAP_FILTER, self.attr_login, |
|
135 | 135 | username) |
|
136 | 136 | log.debug("Authenticating %r filter %s at %s", self.BASE_DN, |
|
137 | 137 | filter_, self.LDAP_SERVER) |
|
138 | 138 | lobjects = server.search_ext_s(self.BASE_DN, self.SEARCH_SCOPE, |
|
139 | 139 | filter_) |
|
140 | 140 | |
|
141 | 141 | if not lobjects: |
|
142 | 142 | raise ldap.NO_SUCH_OBJECT() |
|
143 | 143 | |
|
144 | 144 | for (dn, _attrs) in lobjects: |
|
145 | 145 | if dn is None: |
|
146 | 146 | continue |
|
147 | 147 | |
|
148 | 148 | try: |
|
149 |
log.debug('Trying simple bind with %s' |
|
|
149 | log.debug('Trying simple bind with %s', dn) | |
|
150 | 150 | server.simple_bind_s(dn, safe_str(password)) |
|
151 | 151 | attrs = server.search_ext_s(dn, ldap.SCOPE_BASE, |
|
152 | 152 | '(objectClass=*)')[0][1] |
|
153 | 153 | break |
|
154 | 154 | |
|
155 | 155 | except ldap.INVALID_CREDENTIALS: |
|
156 | log.debug("LDAP rejected password for user '%s' (%s): %s" | |
|
157 |
|
|
|
156 | log.debug("LDAP rejected password for user '%s' (%s): %s", | |
|
157 | uid, username, dn) | |
|
158 | 158 | |
|
159 | 159 | else: |
|
160 | 160 | log.debug("No matching LDAP objects for authentication " |
|
161 | 161 | "of '%s' (%s)", uid, username) |
|
162 | 162 | raise LdapPasswordError() |
|
163 | 163 | |
|
164 | 164 | except ldap.NO_SUCH_OBJECT: |
|
165 |
log.debug("LDAP says no such user '%s' (%s)" |
|
|
165 | log.debug("LDAP says no such user '%s' (%s)", uid, username) | |
|
166 | 166 | raise LdapUsernameError() |
|
167 | 167 | except ldap.SERVER_DOWN: |
|
168 | 168 | raise LdapConnectionError("LDAP can't access authentication server") |
|
169 | 169 | |
|
170 | 170 | return dn, attrs |
|
171 | 171 | |
|
172 | 172 | |
|
173 | 173 | class KallitheaAuthPlugin(auth_modules.KallitheaExternalAuthPlugin): |
|
174 | 174 | def __init__(self): |
|
175 | 175 | self._logger = logging.getLogger(__name__) |
|
176 | 176 | self._tls_kind_values = ["PLAIN", "LDAPS", "START_TLS"] |
|
177 | 177 | self._tls_reqcert_values = ["NEVER", "ALLOW", "TRY", "DEMAND", "HARD"] |
|
178 | 178 | self._search_scopes = ["BASE", "ONELEVEL", "SUBTREE"] |
|
179 | 179 | |
|
180 | 180 | @hybrid_property |
|
181 | 181 | def name(self): |
|
182 | 182 | return "ldap" |
|
183 | 183 | |
|
184 | 184 | def settings(self): |
|
185 | 185 | settings = [ |
|
186 | 186 | { |
|
187 | 187 | "name": "host", |
|
188 | 188 | "validator": self.validators.UnicodeString(strip=True), |
|
189 | 189 | "type": "string", |
|
190 | 190 | "description": "Host of the LDAP Server", |
|
191 | 191 | "formname": "LDAP Host" |
|
192 | 192 | }, |
|
193 | 193 | { |
|
194 | 194 | "name": "port", |
|
195 | 195 | "validator": self.validators.Number(strip=True, not_empty=True), |
|
196 | 196 | "type": "string", |
|
197 | 197 | "description": "Port that the LDAP server is listening on", |
|
198 | 198 | "default": 389, |
|
199 | 199 | "formname": "Port" |
|
200 | 200 | }, |
|
201 | 201 | { |
|
202 | 202 | "name": "dn_user", |
|
203 | 203 | "validator": self.validators.UnicodeString(strip=True), |
|
204 | 204 | "type": "string", |
|
205 | 205 | "description": "User to connect to LDAP", |
|
206 | 206 | "formname": "Account" |
|
207 | 207 | }, |
|
208 | 208 | { |
|
209 | 209 | "name": "dn_pass", |
|
210 | 210 | "validator": self.validators.UnicodeString(strip=True), |
|
211 | 211 | "type": "password", |
|
212 | 212 | "description": "Password to connect to LDAP", |
|
213 | 213 | "formname": "Password" |
|
214 | 214 | }, |
|
215 | 215 | { |
|
216 | 216 | "name": "tls_kind", |
|
217 | 217 | "validator": self.validators.OneOf(self._tls_kind_values), |
|
218 | 218 | "type": "select", |
|
219 | 219 | "values": self._tls_kind_values, |
|
220 | 220 | "description": "TLS Type", |
|
221 | 221 | "default": 'PLAIN', |
|
222 | 222 | "formname": "Connection Security" |
|
223 | 223 | }, |
|
224 | 224 | { |
|
225 | 225 | "name": "tls_reqcert", |
|
226 | 226 | "validator": self.validators.OneOf(self._tls_reqcert_values), |
|
227 | 227 | "type": "select", |
|
228 | 228 | "values": self._tls_reqcert_values, |
|
229 | 229 | "description": "Require Cert over TLS?", |
|
230 | 230 | "formname": "Certificate Checks" |
|
231 | 231 | }, |
|
232 | 232 | { |
|
233 | 233 | "name": "base_dn", |
|
234 | 234 | "validator": self.validators.UnicodeString(strip=True), |
|
235 | 235 | "type": "string", |
|
236 | 236 | "description": "Base DN to search (e.g., dc=mydomain,dc=com)", |
|
237 | 237 | "formname": "Base DN" |
|
238 | 238 | }, |
|
239 | 239 | { |
|
240 | 240 | "name": "filter", |
|
241 | 241 | "validator": self.validators.UnicodeString(strip=True), |
|
242 | 242 | "type": "string", |
|
243 | 243 | "description": "Filter to narrow results (e.g., ou=Users, etc)", |
|
244 | 244 | "formname": "LDAP Search Filter" |
|
245 | 245 | }, |
|
246 | 246 | { |
|
247 | 247 | "name": "search_scope", |
|
248 | 248 | "validator": self.validators.OneOf(self._search_scopes), |
|
249 | 249 | "type": "select", |
|
250 | 250 | "values": self._search_scopes, |
|
251 | 251 | "description": "How deep to search LDAP", |
|
252 | 252 | "formname": "LDAP Search Scope" |
|
253 | 253 | }, |
|
254 | 254 | { |
|
255 | 255 | "name": "attr_login", |
|
256 | 256 | "validator": self.validators.AttrLoginValidator(not_empty=True, strip=True), |
|
257 | 257 | "type": "string", |
|
258 | 258 | "description": "LDAP Attribute to map to user name", |
|
259 | 259 | "formname": "Login Attribute" |
|
260 | 260 | }, |
|
261 | 261 | { |
|
262 | 262 | "name": "attr_firstname", |
|
263 | 263 | "validator": self.validators.UnicodeString(strip=True), |
|
264 | 264 | "type": "string", |
|
265 | 265 | "description": "LDAP Attribute to map to first name", |
|
266 | 266 | "formname": "First Name Attribute" |
|
267 | 267 | }, |
|
268 | 268 | { |
|
269 | 269 | "name": "attr_lastname", |
|
270 | 270 | "validator": self.validators.UnicodeString(strip=True), |
|
271 | 271 | "type": "string", |
|
272 | 272 | "description": "LDAP Attribute to map to last name", |
|
273 | 273 | "formname": "Last Name Attribute" |
|
274 | 274 | }, |
|
275 | 275 | { |
|
276 | 276 | "name": "attr_email", |
|
277 | 277 | "validator": self.validators.UnicodeString(strip=True), |
|
278 | 278 | "type": "string", |
|
279 | 279 | "description": "LDAP Attribute to map to email address", |
|
280 | 280 | "formname": "Email Attribute" |
|
281 | 281 | } |
|
282 | 282 | ] |
|
283 | 283 | return settings |
|
284 | 284 | |
|
285 | 285 | def use_fake_password(self): |
|
286 | 286 | return True |
|
287 | 287 | |
|
288 | 288 | def user_activation_state(self): |
|
289 | 289 | def_user_perms = User.get_default_user().AuthUser.permissions['global'] |
|
290 | 290 | return 'hg.extern_activate.auto' in def_user_perms |
|
291 | 291 | |
|
292 | 292 | def auth(self, userobj, username, password, settings, **kwargs): |
|
293 | 293 | """ |
|
294 | 294 | Given a user object (which may be null), username, a plaintext password, |
|
295 | 295 | and a settings object (containing all the keys needed as listed in settings()), |
|
296 | 296 | authenticate this user's login attempt. |
|
297 | 297 | |
|
298 | 298 | Return None on failure. On success, return a dictionary of the form: |
|
299 | 299 | |
|
300 | 300 | see: KallitheaAuthPluginBase.auth_func_attrs |
|
301 | 301 | This is later validated for correctness |
|
302 | 302 | """ |
|
303 | 303 | |
|
304 | 304 | if not username or not password: |
|
305 | 305 | log.debug('Empty username or password skipping...') |
|
306 | 306 | return None |
|
307 | 307 | |
|
308 | 308 | kwargs = { |
|
309 | 309 | 'server': settings.get('host', ''), |
|
310 | 310 | 'base_dn': settings.get('base_dn', ''), |
|
311 | 311 | 'port': settings.get('port'), |
|
312 | 312 | 'bind_dn': settings.get('dn_user'), |
|
313 | 313 | 'bind_pass': settings.get('dn_pass'), |
|
314 | 314 | 'tls_kind': settings.get('tls_kind'), |
|
315 | 315 | 'tls_reqcert': settings.get('tls_reqcert'), |
|
316 | 316 | 'ldap_filter': settings.get('filter'), |
|
317 | 317 | 'search_scope': settings.get('search_scope'), |
|
318 | 318 | 'attr_login': settings.get('attr_login'), |
|
319 | 319 | 'ldap_version': 3, |
|
320 | 320 | } |
|
321 | 321 | |
|
322 | 322 | if kwargs['bind_dn'] and not kwargs['bind_pass']: |
|
323 | 323 | log.debug('Using dynamic binding.') |
|
324 | 324 | kwargs['bind_dn'] = kwargs['bind_dn'].replace('$login', username) |
|
325 | 325 | kwargs['bind_pass'] = password |
|
326 | 326 | log.debug('Checking for ldap authentication') |
|
327 | 327 | |
|
328 | 328 | try: |
|
329 | 329 | aldap = AuthLdap(**kwargs) |
|
330 | 330 | (user_dn, ldap_attrs) = aldap.authenticate_ldap(username, password) |
|
331 |
log.debug('Got ldap DN response %s' |
|
|
331 | log.debug('Got ldap DN response %s', user_dn) | |
|
332 | 332 | |
|
333 | 333 | get_ldap_attr = lambda k: ldap_attrs.get(settings.get(k), [''])[0] |
|
334 | 334 | |
|
335 | 335 | # old attrs fetched from Kallithea database |
|
336 | 336 | admin = getattr(userobj, 'admin', False) |
|
337 | 337 | active = getattr(userobj, 'active', self.user_activation_state()) |
|
338 | 338 | email = getattr(userobj, 'email', '') |
|
339 | 339 | firstname = getattr(userobj, 'firstname', '') |
|
340 | 340 | lastname = getattr(userobj, 'lastname', '') |
|
341 | 341 | |
|
342 | 342 | user_data = { |
|
343 | 343 | 'username': username, |
|
344 | 344 | 'firstname': safe_unicode(get_ldap_attr('attr_firstname') or firstname), |
|
345 | 345 | 'lastname': safe_unicode(get_ldap_attr('attr_lastname') or lastname), |
|
346 | 346 | 'groups': [], |
|
347 | 347 | 'email': get_ldap_attr('attr_email') or email, |
|
348 | 348 | 'admin': admin, |
|
349 | 349 | 'active': active, |
|
350 | 350 | "active_from_extern": None, |
|
351 | 351 | 'extern_name': user_dn, |
|
352 | 352 | } |
|
353 |
log.info('user %s authenticated correctly' |
|
|
353 | log.info('user %s authenticated correctly', user_data['username']) | |
|
354 | 354 | return user_data |
|
355 | 355 | |
|
356 | 356 | except (LdapUsernameError, LdapPasswordError, LdapImportError): |
|
357 | 357 | log.error(traceback.format_exc()) |
|
358 | 358 | return None |
|
359 | 359 | except (Exception,): |
|
360 | 360 | log.error(traceback.format_exc()) |
|
361 | 361 | return None |
|
362 | 362 | |
|
363 | 363 | def get_managed_fields(self): |
|
364 | 364 | return ['username', 'firstname', 'lastname', 'email', 'password'] |
@@ -1,141 +1,141 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.auth_pam |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Kallithea authentication library for PAM |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Created on Apr 09, 2013 |
|
23 | 23 | :author: Alexey Larikov |
|
24 | 24 | """ |
|
25 | 25 | |
|
26 | 26 | import logging |
|
27 | 27 | import time |
|
28 | 28 | import pam |
|
29 | 29 | import pwd |
|
30 | 30 | import grp |
|
31 | 31 | import re |
|
32 | 32 | import socket |
|
33 | 33 | import threading |
|
34 | 34 | |
|
35 | 35 | from kallithea.lib import auth_modules |
|
36 | 36 | from kallithea.lib.compat import formatted_json, hybrid_property |
|
37 | 37 | |
|
38 | 38 | log = logging.getLogger(__name__) |
|
39 | 39 | |
|
40 | 40 | # Cache to store PAM authenticated users |
|
41 | 41 | _auth_cache = dict() |
|
42 | 42 | _pam_lock = threading.Lock() |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | class KallitheaAuthPlugin(auth_modules.KallitheaExternalAuthPlugin): |
|
46 | 46 | # PAM authnetication can be slow. Repository operations involve a lot of |
|
47 | 47 | # auth calls. Little caching helps speedup push/pull operations significantly |
|
48 | 48 | AUTH_CACHE_TTL = 4 |
|
49 | 49 | |
|
50 | 50 | def __init__(self): |
|
51 | 51 | global _auth_cache |
|
52 | 52 | ts = time.time() |
|
53 | 53 | cleared_cache = dict( |
|
54 | 54 | [(k, v) for (k, v) in _auth_cache.items() if |
|
55 | 55 | (v + KallitheaAuthPlugin.AUTH_CACHE_TTL > ts)]) |
|
56 | 56 | _auth_cache = cleared_cache |
|
57 | 57 | |
|
58 | 58 | @hybrid_property |
|
59 | 59 | def name(self): |
|
60 | 60 | return "pam" |
|
61 | 61 | |
|
62 | 62 | def settings(self): |
|
63 | 63 | settings = [ |
|
64 | 64 | { |
|
65 | 65 | "name": "service", |
|
66 | 66 | "validator": self.validators.UnicodeString(strip=True), |
|
67 | 67 | "type": "string", |
|
68 | 68 | "description": "PAM service name to use for authentication", |
|
69 | 69 | "default": "login", |
|
70 | 70 | "formname": "PAM service name" |
|
71 | 71 | }, |
|
72 | 72 | { |
|
73 | 73 | "name": "gecos", |
|
74 | 74 | "validator": self.validators.UnicodeString(strip=True), |
|
75 | 75 | "type": "string", |
|
76 | 76 | "description": "Regex for extracting user name/email etc " |
|
77 | 77 | "from Unix userinfo", |
|
78 | 78 | "default": "(?P<last_name>.+),\s*(?P<first_name>\w+)", |
|
79 | 79 | "formname": "Gecos Regex" |
|
80 | 80 | } |
|
81 | 81 | ] |
|
82 | 82 | return settings |
|
83 | 83 | |
|
84 | 84 | def use_fake_password(self): |
|
85 | 85 | return True |
|
86 | 86 | |
|
87 | 87 | def auth(self, userobj, username, password, settings, **kwargs): |
|
88 | 88 | if username not in _auth_cache: |
|
89 | 89 | # Need lock here, as PAM authentication is not thread safe |
|
90 | 90 | _pam_lock.acquire() |
|
91 | 91 | try: |
|
92 | 92 | auth_result = pam.authenticate(username, password, |
|
93 | 93 | settings["service"]) |
|
94 | 94 | # cache result only if we properly authenticated |
|
95 | 95 | if auth_result: |
|
96 | 96 | _auth_cache[username] = time.time() |
|
97 | 97 | finally: |
|
98 | 98 | _pam_lock.release() |
|
99 | 99 | |
|
100 | 100 | if not auth_result: |
|
101 |
log.error("PAM was unable to authenticate user: %s" |
|
|
101 | log.error("PAM was unable to authenticate user: %s", username) | |
|
102 | 102 | return None |
|
103 | 103 | else: |
|
104 |
log.debug("Using cached auth for user: %s" |
|
|
104 | log.debug("Using cached auth for user: %s", username) | |
|
105 | 105 | |
|
106 | 106 | # old attrs fetched from Kallithea database |
|
107 | 107 | admin = getattr(userobj, 'admin', False) |
|
108 | 108 | active = getattr(userobj, 'active', True) |
|
109 | 109 | email = getattr(userobj, 'email', '') or "%s@%s" % (username, socket.gethostname()) |
|
110 | 110 | firstname = getattr(userobj, 'firstname', '') |
|
111 | 111 | lastname = getattr(userobj, 'lastname', '') |
|
112 | 112 | |
|
113 | 113 | user_data = { |
|
114 | 114 | 'username': username, |
|
115 | 115 | 'firstname': firstname, |
|
116 | 116 | 'lastname': lastname, |
|
117 | 117 | 'groups': [g.gr_name for g in grp.getgrall() if username in g.gr_mem], |
|
118 | 118 | 'email': email, |
|
119 | 119 | 'admin': admin, |
|
120 | 120 | 'active': active, |
|
121 | 121 | "active_from_extern": None, |
|
122 | 122 | 'extern_name': username, |
|
123 | 123 | } |
|
124 | 124 | |
|
125 | 125 | try: |
|
126 | 126 | user_data = pwd.getpwnam(username) |
|
127 | 127 | regex = settings["gecos"] |
|
128 | 128 | match = re.search(regex, user_data.pw_gecos) |
|
129 | 129 | if match: |
|
130 | 130 | user_data["firstname"] = match.group('first_name') |
|
131 | 131 | user_data["lastname"] = match.group('last_name') |
|
132 | 132 | except Exception: |
|
133 | 133 | log.warning("Cannot extract additional info for PAM user %s", username) |
|
134 | 134 | pass |
|
135 | 135 | |
|
136 |
log.debug("pamuser: \n%s" |
|
|
137 |
log.info('user %s authenticated correctly' |
|
|
136 | log.debug("pamuser: \n%s", formatted_json(user_data)) | |
|
137 | log.info('user %s authenticated correctly', user_data['username']) | |
|
138 | 138 | return user_data |
|
139 | 139 | |
|
140 | 140 | def get_managed_fields(self): |
|
141 | 141 | return ['username', 'password'] |
@@ -1,521 +1,521 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | |
|
15 | 15 | """ |
|
16 | 16 | kallithea.lib.base |
|
17 | 17 | ~~~~~~~~~~~~~~~~~~ |
|
18 | 18 | |
|
19 | 19 | The base Controller API |
|
20 | 20 | Provides the BaseController class for subclassing. And usage in different |
|
21 | 21 | controllers |
|
22 | 22 | |
|
23 | 23 | This file was forked by the Kallithea project in July 2014. |
|
24 | 24 | Original author and date, and relevant copyright and licensing information is below: |
|
25 | 25 | :created_on: Oct 06, 2010 |
|
26 | 26 | :author: marcink |
|
27 | 27 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
28 | 28 | :license: GPLv3, see LICENSE.md for more details. |
|
29 | 29 | """ |
|
30 | 30 | |
|
31 | 31 | import datetime |
|
32 | 32 | import logging |
|
33 | 33 | import time |
|
34 | 34 | import traceback |
|
35 | 35 | |
|
36 | 36 | import webob.exc |
|
37 | 37 | import paste.httpexceptions |
|
38 | 38 | import paste.auth.basic |
|
39 | 39 | import paste.httpheaders |
|
40 | 40 | |
|
41 | 41 | from pylons import config, tmpl_context as c, request, session, url |
|
42 | 42 | from pylons.controllers import WSGIController |
|
43 | 43 | from pylons.controllers.util import redirect |
|
44 | 44 | from pylons.templating import render_mako as render # don't remove this import |
|
45 | 45 | from pylons.i18n.translation import _ |
|
46 | 46 | |
|
47 | 47 | from kallithea import __version__, BACKENDS |
|
48 | 48 | |
|
49 | 49 | from kallithea.lib.utils2 import str2bool, safe_unicode, AttributeDict,\ |
|
50 | 50 | safe_str, safe_int |
|
51 | 51 | from kallithea.lib import auth_modules |
|
52 | 52 | from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware |
|
53 | 53 | from kallithea.lib.utils import get_repo_slug |
|
54 | 54 | from kallithea.lib.exceptions import UserCreationError |
|
55 | 55 | from kallithea.lib.vcs.exceptions import RepositoryError, EmptyRepositoryError, ChangesetDoesNotExistError |
|
56 | 56 | from kallithea.model import meta |
|
57 | 57 | |
|
58 | 58 | from kallithea.model.db import Repository, Ui, User, Setting |
|
59 | 59 | from kallithea.model.notification import NotificationModel |
|
60 | 60 | from kallithea.model.scm import ScmModel |
|
61 | 61 | from kallithea.model.pull_request import PullRequestModel |
|
62 | 62 | |
|
63 | 63 | log = logging.getLogger(__name__) |
|
64 | 64 | |
|
65 | 65 | |
|
66 | 66 | def _filter_proxy(ip): |
|
67 | 67 | """ |
|
68 | 68 | HEADERS can have multiple ips inside the left-most being the original |
|
69 | 69 | client, and each successive proxy that passed the request adding the IP |
|
70 | 70 | address where it received the request from. |
|
71 | 71 | |
|
72 | 72 | :param ip: |
|
73 | 73 | """ |
|
74 | 74 | if ',' in ip: |
|
75 | 75 | _ips = ip.split(',') |
|
76 | 76 | _first_ip = _ips[0].strip() |
|
77 |
log.debug('Got multiple IPs %s, using %s' |
|
|
77 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) | |
|
78 | 78 | return _first_ip |
|
79 | 79 | return ip |
|
80 | 80 | |
|
81 | 81 | |
|
82 | 82 | def _get_ip_addr(environ): |
|
83 | 83 | proxy_key = 'HTTP_X_REAL_IP' |
|
84 | 84 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' |
|
85 | 85 | def_key = 'REMOTE_ADDR' |
|
86 | 86 | |
|
87 | 87 | ip = environ.get(proxy_key) |
|
88 | 88 | if ip: |
|
89 | 89 | return _filter_proxy(ip) |
|
90 | 90 | |
|
91 | 91 | ip = environ.get(proxy_key2) |
|
92 | 92 | if ip: |
|
93 | 93 | return _filter_proxy(ip) |
|
94 | 94 | |
|
95 | 95 | ip = environ.get(def_key, '0.0.0.0') |
|
96 | 96 | return _filter_proxy(ip) |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | def _get_access_path(environ): |
|
100 | 100 | path = environ.get('PATH_INFO') |
|
101 | 101 | org_req = environ.get('pylons.original_request') |
|
102 | 102 | if org_req: |
|
103 | 103 | path = org_req.environ.get('PATH_INFO') |
|
104 | 104 | return path |
|
105 | 105 | |
|
106 | 106 | |
|
107 | 107 | def log_in_user(user, remember, is_external_auth): |
|
108 | 108 | """ |
|
109 | 109 | Log a `User` in and update session and cookies. If `remember` is True, |
|
110 | 110 | the session cookie is set to expire in a year; otherwise, it expires at |
|
111 | 111 | the end of the browser session. |
|
112 | 112 | |
|
113 | 113 | Returns populated `AuthUser` object. |
|
114 | 114 | """ |
|
115 | 115 | user.update_lastlogin() |
|
116 | 116 | meta.Session().commit() |
|
117 | 117 | |
|
118 | 118 | auth_user = AuthUser(dbuser=user, |
|
119 | 119 | is_external_auth=is_external_auth) |
|
120 | 120 | auth_user.set_authenticated() |
|
121 | 121 | |
|
122 | 122 | # Start new session to prevent session fixation attacks. |
|
123 | 123 | session.invalidate() |
|
124 | 124 | session['authuser'] = cookie = auth_user.to_cookie() |
|
125 | 125 | |
|
126 | 126 | # If they want to be remembered, update the cookie |
|
127 | 127 | if remember: |
|
128 | 128 | t = datetime.datetime.now() + datetime.timedelta(days=365) |
|
129 | 129 | session._set_cookie_expires(t) |
|
130 | 130 | |
|
131 | 131 | session.save() |
|
132 | 132 | |
|
133 | 133 | log.info('user %s is now authenticated and stored in ' |
|
134 | 134 | 'session, session attrs %s', user.username, cookie) |
|
135 | 135 | |
|
136 | 136 | # dumps session attrs back to cookie |
|
137 | 137 | session._update_cookie_out() |
|
138 | 138 | |
|
139 | 139 | return auth_user |
|
140 | 140 | |
|
141 | 141 | |
|
142 | 142 | class BasicAuth(paste.auth.basic.AuthBasicAuthenticator): |
|
143 | 143 | |
|
144 | 144 | def __init__(self, realm, authfunc, auth_http_code=None): |
|
145 | 145 | self.realm = realm |
|
146 | 146 | self.authfunc = authfunc |
|
147 | 147 | self._rc_auth_http_code = auth_http_code |
|
148 | 148 | |
|
149 | 149 | def build_authentication(self): |
|
150 | 150 | head = paste.httpheaders.WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm) |
|
151 | 151 | if self._rc_auth_http_code and self._rc_auth_http_code == '403': |
|
152 | 152 | # return 403 if alternative http return code is specified in |
|
153 | 153 | # Kallithea config |
|
154 | 154 | return paste.httpexceptions.HTTPForbidden(headers=head) |
|
155 | 155 | return paste.httpexceptions.HTTPUnauthorized(headers=head) |
|
156 | 156 | |
|
157 | 157 | def authenticate(self, environ): |
|
158 | 158 | authorization = paste.httpheaders.AUTHORIZATION(environ) |
|
159 | 159 | if not authorization: |
|
160 | 160 | return self.build_authentication() |
|
161 | 161 | (authmeth, auth) = authorization.split(' ', 1) |
|
162 | 162 | if 'basic' != authmeth.lower(): |
|
163 | 163 | return self.build_authentication() |
|
164 | 164 | auth = auth.strip().decode('base64') |
|
165 | 165 | _parts = auth.split(':', 1) |
|
166 | 166 | if len(_parts) == 2: |
|
167 | 167 | username, password = _parts |
|
168 | 168 | if self.authfunc(username, password, environ) is not None: |
|
169 | 169 | return username |
|
170 | 170 | return self.build_authentication() |
|
171 | 171 | |
|
172 | 172 | __call__ = authenticate |
|
173 | 173 | |
|
174 | 174 | |
|
175 | 175 | class BaseVCSController(object): |
|
176 | 176 | |
|
177 | 177 | def __init__(self, application, config): |
|
178 | 178 | self.application = application |
|
179 | 179 | self.config = config |
|
180 | 180 | # base path of repo locations |
|
181 | 181 | self.basepath = self.config['base_path'] |
|
182 | 182 | # authenticate this VCS request using the authentication modules |
|
183 | 183 | self.authenticate = BasicAuth('', auth_modules.authenticate, |
|
184 | 184 | config.get('auth_ret_code')) |
|
185 | 185 | self.ip_addr = '0.0.0.0' |
|
186 | 186 | |
|
187 | 187 | def _handle_request(self, environ, start_response): |
|
188 | 188 | raise NotImplementedError() |
|
189 | 189 | |
|
190 | 190 | def _get_by_id(self, repo_name): |
|
191 | 191 | """ |
|
192 | 192 | Gets a special pattern _<ID> from clone url and tries to replace it |
|
193 | 193 | with a repository_name for support of _<ID> permanent URLs |
|
194 | 194 | |
|
195 | 195 | :param repo_name: |
|
196 | 196 | """ |
|
197 | 197 | |
|
198 | 198 | data = repo_name.split('/') |
|
199 | 199 | if len(data) >= 2: |
|
200 | 200 | from kallithea.lib.utils import get_repo_by_id |
|
201 | 201 | by_id_match = get_repo_by_id(repo_name) |
|
202 | 202 | if by_id_match: |
|
203 | 203 | data[1] = by_id_match |
|
204 | 204 | |
|
205 | 205 | return '/'.join(data) |
|
206 | 206 | |
|
207 | 207 | def _invalidate_cache(self, repo_name): |
|
208 | 208 | """ |
|
209 | 209 | Sets cache for this repository for invalidation on next access |
|
210 | 210 | |
|
211 | 211 | :param repo_name: full repo name, also a cache key |
|
212 | 212 | """ |
|
213 | 213 | ScmModel().mark_for_invalidation(repo_name) |
|
214 | 214 | |
|
215 | 215 | def _check_permission(self, action, user, repo_name, ip_addr=None): |
|
216 | 216 | """ |
|
217 | 217 | Checks permissions using action (push/pull) user and repository |
|
218 | 218 | name |
|
219 | 219 | |
|
220 | 220 | :param action: push or pull action |
|
221 | 221 | :param user: `User` instance |
|
222 | 222 | :param repo_name: repository name |
|
223 | 223 | """ |
|
224 | 224 | # check IP |
|
225 | 225 | ip_allowed = AuthUser.check_ip_allowed(user, ip_addr) |
|
226 | 226 | if ip_allowed: |
|
227 |
log.info('Access for IP:%s allowed' |
|
|
227 | log.info('Access for IP:%s allowed', ip_addr) | |
|
228 | 228 | else: |
|
229 | 229 | return False |
|
230 | 230 | |
|
231 | 231 | if action == 'push': |
|
232 | 232 | if not HasPermissionAnyMiddleware('repository.write', |
|
233 | 233 | 'repository.admin')(user, |
|
234 | 234 | repo_name): |
|
235 | 235 | return False |
|
236 | 236 | |
|
237 | 237 | else: |
|
238 | 238 | #any other action need at least read permission |
|
239 | 239 | if not HasPermissionAnyMiddleware('repository.read', |
|
240 | 240 | 'repository.write', |
|
241 | 241 | 'repository.admin')(user, |
|
242 | 242 | repo_name): |
|
243 | 243 | return False |
|
244 | 244 | |
|
245 | 245 | return True |
|
246 | 246 | |
|
247 | 247 | def _get_ip_addr(self, environ): |
|
248 | 248 | return _get_ip_addr(environ) |
|
249 | 249 | |
|
250 | 250 | def _check_ssl(self, environ): |
|
251 | 251 | """ |
|
252 | 252 | Checks the SSL check flag and returns False if SSL is not present |
|
253 | 253 | and required True otherwise |
|
254 | 254 | """ |
|
255 | 255 | #check if we have SSL required ! if not it's a bad request ! |
|
256 | 256 | if str2bool(Ui.get_by_key('push_ssl').ui_value): |
|
257 | 257 | org_proto = environ.get('wsgi._org_proto', environ['wsgi.url_scheme']) |
|
258 | 258 | if org_proto != 'https': |
|
259 | log.debug('proto is %s and SSL is required BAD REQUEST !' | |
|
260 |
|
|
|
259 | log.debug('proto is %s and SSL is required BAD REQUEST !', | |
|
260 | org_proto) | |
|
261 | 261 | return False |
|
262 | 262 | return True |
|
263 | 263 | |
|
264 | 264 | def _check_locking_state(self, environ, action, repo, user_id): |
|
265 | 265 | """ |
|
266 | 266 | Checks locking on this repository, if locking is enabled and lock is |
|
267 | 267 | present returns a tuple of make_lock, locked, locked_by. |
|
268 | 268 | make_lock can have 3 states None (do nothing) True, make lock |
|
269 | 269 | False release lock, This value is later propagated to hooks, which |
|
270 | 270 | do the locking. Think about this as signals passed to hooks what to do. |
|
271 | 271 | |
|
272 | 272 | """ |
|
273 | 273 | locked = False # defines that locked error should be thrown to user |
|
274 | 274 | make_lock = None |
|
275 | 275 | repo = Repository.get_by_repo_name(repo) |
|
276 | 276 | user = User.get(user_id) |
|
277 | 277 | |
|
278 | 278 | # this is kind of hacky, but due to how mercurial handles client-server |
|
279 | 279 | # server see all operation on changeset; bookmarks, phases and |
|
280 | 280 | # obsolescence marker in different transaction, we don't want to check |
|
281 | 281 | # locking on those |
|
282 | 282 | obsolete_call = environ['QUERY_STRING'] in ['cmd=listkeys',] |
|
283 | 283 | locked_by = repo.locked |
|
284 | 284 | if repo and repo.enable_locking and not obsolete_call: |
|
285 | 285 | if action == 'push': |
|
286 | 286 | #check if it's already locked !, if it is compare users |
|
287 | 287 | user_id, _date = repo.locked |
|
288 | 288 | if user.user_id == user_id: |
|
289 |
log.debug('Got push from user %s, now unlocking' |
|
|
289 | log.debug('Got push from user %s, now unlocking', user) | |
|
290 | 290 | # unlock if we have push from user who locked |
|
291 | 291 | make_lock = False |
|
292 | 292 | else: |
|
293 | 293 | # we're not the same user who locked, ban with 423 ! |
|
294 | 294 | locked = True |
|
295 | 295 | if action == 'pull': |
|
296 | 296 | if repo.locked[0] and repo.locked[1]: |
|
297 | 297 | locked = True |
|
298 | 298 | else: |
|
299 |
log.debug('Setting lock on repo %s by %s' |
|
|
299 | log.debug('Setting lock on repo %s by %s', repo, user) | |
|
300 | 300 | make_lock = True |
|
301 | 301 | |
|
302 | 302 | else: |
|
303 |
log.debug('Repository %s do not have locking enabled' |
|
|
304 | log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s' | |
|
305 |
|
|
|
303 | log.debug('Repository %s do not have locking enabled', repo) | |
|
304 | log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s', | |
|
305 | make_lock, locked, locked_by) | |
|
306 | 306 | return make_lock, locked, locked_by |
|
307 | 307 | |
|
308 | 308 | def __call__(self, environ, start_response): |
|
309 | 309 | start = time.time() |
|
310 | 310 | try: |
|
311 | 311 | return self._handle_request(environ, start_response) |
|
312 | 312 | finally: |
|
313 | 313 | log = logging.getLogger('kallithea.' + self.__class__.__name__) |
|
314 |
log.debug('Request time: %.3fs' |
|
|
314 | log.debug('Request time: %.3fs', time.time() - start) | |
|
315 | 315 | meta.Session.remove() |
|
316 | 316 | |
|
317 | 317 | |
|
318 | 318 | class BaseController(WSGIController): |
|
319 | 319 | |
|
320 | 320 | def __before__(self): |
|
321 | 321 | """ |
|
322 | 322 | __before__ is called before controller methods and after __call__ |
|
323 | 323 | """ |
|
324 | 324 | c.kallithea_version = __version__ |
|
325 | 325 | rc_config = Setting.get_app_settings() |
|
326 | 326 | |
|
327 | 327 | # Visual options |
|
328 | 328 | c.visual = AttributeDict({}) |
|
329 | 329 | |
|
330 | 330 | ## DB stored |
|
331 | 331 | c.visual.show_public_icon = str2bool(rc_config.get('show_public_icon')) |
|
332 | 332 | c.visual.show_private_icon = str2bool(rc_config.get('show_private_icon')) |
|
333 | 333 | c.visual.stylify_metatags = str2bool(rc_config.get('stylify_metatags')) |
|
334 | 334 | c.visual.dashboard_items = safe_int(rc_config.get('dashboard_items', 100)) |
|
335 | 335 | c.visual.admin_grid_items = safe_int(rc_config.get('admin_grid_items', 100)) |
|
336 | 336 | c.visual.repository_fields = str2bool(rc_config.get('repository_fields')) |
|
337 | 337 | c.visual.show_version = str2bool(rc_config.get('show_version')) |
|
338 | 338 | c.visual.use_gravatar = str2bool(rc_config.get('use_gravatar')) |
|
339 | 339 | c.visual.gravatar_url = rc_config.get('gravatar_url') |
|
340 | 340 | |
|
341 | 341 | c.ga_code = rc_config.get('ga_code') |
|
342 | 342 | # TODO: replace undocumented backwards compatibility hack with db upgrade and rename ga_code |
|
343 | 343 | if c.ga_code and '<' not in c.ga_code: |
|
344 | 344 | c.ga_code = '''<script type="text/javascript"> |
|
345 | 345 | var _gaq = _gaq || []; |
|
346 | 346 | _gaq.push(['_setAccount', '%s']); |
|
347 | 347 | _gaq.push(['_trackPageview']); |
|
348 | 348 | |
|
349 | 349 | (function() { |
|
350 | 350 | var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true; |
|
351 | 351 | ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js'; |
|
352 | 352 | var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s); |
|
353 | 353 | })(); |
|
354 | 354 | </script>''' % c.ga_code |
|
355 | 355 | c.site_name = rc_config.get('title') |
|
356 | 356 | c.clone_uri_tmpl = rc_config.get('clone_uri_tmpl') |
|
357 | 357 | |
|
358 | 358 | ## INI stored |
|
359 | 359 | c.visual.allow_repo_location_change = str2bool(config.get('allow_repo_location_change', True)) |
|
360 | 360 | c.visual.allow_custom_hooks_settings = str2bool(config.get('allow_custom_hooks_settings', True)) |
|
361 | 361 | |
|
362 | 362 | c.instance_id = config.get('instance_id') |
|
363 | 363 | c.issues_url = config.get('bugtracker', url('issues_url')) |
|
364 | 364 | # END CONFIG VARS |
|
365 | 365 | |
|
366 | 366 | c.repo_name = get_repo_slug(request) # can be empty |
|
367 | 367 | c.backends = BACKENDS.keys() |
|
368 | 368 | c.unread_notifications = NotificationModel()\ |
|
369 | 369 | .get_unread_cnt_for_user(c.authuser.user_id) |
|
370 | 370 | |
|
371 | 371 | self.cut_off_limit = safe_int(config.get('cut_off_limit')) |
|
372 | 372 | |
|
373 | 373 | c.my_pr_count = PullRequestModel().get_pullrequest_cnt_for_user(c.authuser.user_id) |
|
374 | 374 | |
|
375 | 375 | self.sa = meta.Session |
|
376 | 376 | self.scm_model = ScmModel(self.sa) |
|
377 | 377 | |
|
378 | 378 | @staticmethod |
|
379 | 379 | def _determine_auth_user(api_key, session_authuser): |
|
380 | 380 | """ |
|
381 | 381 | Create an `AuthUser` object given the API key (if any) and the |
|
382 | 382 | value of the authuser session cookie. |
|
383 | 383 | """ |
|
384 | 384 | |
|
385 | 385 | # Authenticate by API key |
|
386 | 386 | if api_key: |
|
387 | 387 | # when using API_KEY we are sure user exists. |
|
388 | 388 | return AuthUser(dbuser=User.get_by_api_key(api_key), |
|
389 | 389 | is_external_auth=True) |
|
390 | 390 | |
|
391 | 391 | # Authenticate by session cookie |
|
392 | 392 | # In ancient login sessions, 'authuser' may not be a dict. |
|
393 | 393 | # In that case, the user will have to log in again. |
|
394 | 394 | if isinstance(session_authuser, dict): |
|
395 | 395 | try: |
|
396 | 396 | return AuthUser.from_cookie(session_authuser) |
|
397 | 397 | except UserCreationError as e: |
|
398 | 398 | # container auth or other auth functions that create users on |
|
399 | 399 | # the fly can throw UserCreationError to signal issues with |
|
400 | 400 | # user creation. Explanation should be provided in the |
|
401 | 401 | # exception object. |
|
402 | 402 | from kallithea.lib import helpers as h |
|
403 | 403 | h.flash(e, 'error', logf=log.error) |
|
404 | 404 | |
|
405 | 405 | # Authenticate by auth_container plugin (if enabled) |
|
406 | 406 | if any( |
|
407 | 407 | auth_modules.importplugin(name).is_container_auth |
|
408 | 408 | for name in Setting.get_auth_plugins() |
|
409 | 409 | ): |
|
410 | 410 | try: |
|
411 | 411 | user_info = auth_modules.authenticate('', '', request.environ) |
|
412 | 412 | except UserCreationError as e: |
|
413 | 413 | from kallithea.lib import helpers as h |
|
414 | 414 | h.flash(e, 'error', logf=log.error) |
|
415 | 415 | else: |
|
416 | 416 | if user_info is not None: |
|
417 | 417 | username = user_info['username'] |
|
418 | 418 | user = User.get_by_username(username, case_insensitive=True) |
|
419 | 419 | return log_in_user(user, remember=False, |
|
420 | 420 | is_external_auth=True) |
|
421 | 421 | |
|
422 | 422 | # User is anonymous |
|
423 | 423 | return AuthUser() |
|
424 | 424 | |
|
425 | 425 | def __call__(self, environ, start_response): |
|
426 | 426 | """Invoke the Controller""" |
|
427 | 427 | |
|
428 | 428 | # WSGIController.__call__ dispatches to the Controller method |
|
429 | 429 | # the request is routed to. This routing information is |
|
430 | 430 | # available in environ['pylons.routes_dict'] |
|
431 | 431 | try: |
|
432 | 432 | self.ip_addr = _get_ip_addr(environ) |
|
433 | 433 | # make sure that we update permissions each time we call controller |
|
434 | 434 | |
|
435 | 435 | #set globals for auth user |
|
436 | 436 | self.authuser = c.authuser = request.user = self._determine_auth_user( |
|
437 | 437 | request.GET.get('api_key'), |
|
438 | 438 | session.get('authuser'), |
|
439 | 439 | ) |
|
440 | 440 | |
|
441 | 441 | log.info('IP: %s User: %s accessed %s', |
|
442 | 442 | self.ip_addr, self.authuser, |
|
443 | 443 | safe_unicode(_get_access_path(environ)), |
|
444 | 444 | ) |
|
445 | 445 | return WSGIController.__call__(self, environ, start_response) |
|
446 | 446 | finally: |
|
447 | 447 | meta.Session.remove() |
|
448 | 448 | |
|
449 | 449 | |
|
450 | 450 | class BaseRepoController(BaseController): |
|
451 | 451 | """ |
|
452 | 452 | Base class for controllers responsible for loading all needed data for |
|
453 | 453 | repository loaded items are |
|
454 | 454 | |
|
455 | 455 | c.db_repo_scm_instance: instance of scm repository |
|
456 | 456 | c.db_repo: instance of db |
|
457 | 457 | c.repository_followers: number of followers |
|
458 | 458 | c.repository_forks: number of forks |
|
459 | 459 | c.repository_following: weather the current user is following the current repo |
|
460 | 460 | """ |
|
461 | 461 | |
|
462 | 462 | def __before__(self): |
|
463 | 463 | super(BaseRepoController, self).__before__() |
|
464 | 464 | if c.repo_name: # extracted from routes |
|
465 | 465 | _dbr = Repository.get_by_repo_name(c.repo_name) |
|
466 | 466 | if not _dbr: |
|
467 | 467 | return |
|
468 | 468 | |
|
469 | log.debug('Found repository in database %s with state `%s`' | |
|
470 |
|
|
|
469 | log.debug('Found repository in database %s with state `%s`', | |
|
470 | safe_unicode(_dbr), safe_unicode(_dbr.repo_state)) | |
|
471 | 471 | route = getattr(request.environ.get('routes.route'), 'name', '') |
|
472 | 472 | |
|
473 | 473 | # allow to delete repos that are somehow damages in filesystem |
|
474 | 474 | if route in ['delete_repo']: |
|
475 | 475 | return |
|
476 | 476 | |
|
477 | 477 | if _dbr.repo_state in [Repository.STATE_PENDING]: |
|
478 | 478 | if route in ['repo_creating_home']: |
|
479 | 479 | return |
|
480 | 480 | check_url = url('repo_creating_home', repo_name=c.repo_name) |
|
481 | 481 | return redirect(check_url) |
|
482 | 482 | |
|
483 | 483 | dbr = c.db_repo = _dbr |
|
484 | 484 | c.db_repo_scm_instance = c.db_repo.scm_instance |
|
485 | 485 | if c.db_repo_scm_instance is None: |
|
486 | 486 | log.error('%s this repository is present in database but it ' |
|
487 | 487 | 'cannot be created as an scm instance', c.repo_name) |
|
488 | 488 | from kallithea.lib import helpers as h |
|
489 | 489 | h.flash(h.literal(_('Repository not found in the filesystem')), |
|
490 | 490 | category='error') |
|
491 | 491 | raise paste.httpexceptions.HTTPNotFound() |
|
492 | 492 | |
|
493 | 493 | # some globals counter for menu |
|
494 | 494 | c.repository_followers = self.scm_model.get_followers(dbr) |
|
495 | 495 | c.repository_forks = self.scm_model.get_forks(dbr) |
|
496 | 496 | c.repository_pull_requests = self.scm_model.get_pull_requests(dbr) |
|
497 | 497 | c.repository_following = self.scm_model.is_following_repo( |
|
498 | 498 | c.repo_name, self.authuser.user_id) |
|
499 | 499 | |
|
500 | 500 | @staticmethod |
|
501 | 501 | def _get_ref_rev(repo, ref_type, ref_name, returnempty=False): |
|
502 | 502 | """ |
|
503 | 503 | Safe way to get changeset. If error occurs show error. |
|
504 | 504 | """ |
|
505 | 505 | from kallithea.lib import helpers as h |
|
506 | 506 | try: |
|
507 | 507 | return repo.scm_instance.get_ref_revision(ref_type, ref_name) |
|
508 | 508 | except EmptyRepositoryError as e: |
|
509 | 509 | if returnempty: |
|
510 | 510 | return repo.scm_instance.EMPTY_CHANGESET |
|
511 | 511 | h.flash(h.literal(_('There are no changesets yet')), |
|
512 | 512 | category='error') |
|
513 | 513 | raise webob.exc.HTTPNotFound() |
|
514 | 514 | except ChangesetDoesNotExistError as e: |
|
515 | 515 | h.flash(h.literal(_('Changeset not found')), |
|
516 | 516 | category='error') |
|
517 | 517 | raise webob.exc.HTTPNotFound() |
|
518 | 518 | except RepositoryError as e: |
|
519 | 519 | log.error(traceback.format_exc()) |
|
520 | 520 | h.flash(safe_str(e), category='error') |
|
521 | 521 | raise webob.exc.HTTPBadRequest() |
@@ -1,129 +1,129 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.celerylib.__init__ |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | celery libs for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Nov 27, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | import socket |
|
30 | 30 | import traceback |
|
31 | 31 | import logging |
|
32 | 32 | from os.path import join as jn |
|
33 | 33 | from pylons import config |
|
34 | 34 | |
|
35 | 35 | from hashlib import md5 |
|
36 | 36 | from decorator import decorator |
|
37 | 37 | |
|
38 | 38 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
39 | 39 | from kallithea import CELERY_ON, CELERY_EAGER |
|
40 | 40 | from kallithea.lib.utils2 import str2bool, safe_str |
|
41 | 41 | from kallithea.lib.pidlock import DaemonLock, LockHeld |
|
42 | 42 | from kallithea.model import init_model |
|
43 | 43 | from kallithea.model import meta |
|
44 | 44 | |
|
45 | 45 | from sqlalchemy import engine_from_config |
|
46 | 46 | |
|
47 | 47 | |
|
48 | 48 | log = logging.getLogger(__name__) |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | class ResultWrapper(object): |
|
52 | 52 | def __init__(self, task): |
|
53 | 53 | self.task = task |
|
54 | 54 | |
|
55 | 55 | @LazyProperty |
|
56 | 56 | def result(self): |
|
57 | 57 | return self.task |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | def run_task(task, *args, **kwargs): |
|
61 | 61 | global CELERY_ON |
|
62 | 62 | if CELERY_ON: |
|
63 | 63 | try: |
|
64 | 64 | t = task.apply_async(args=args, kwargs=kwargs) |
|
65 |
log.info('running task %s:%s' |
|
|
65 | log.info('running task %s:%s', t.task_id, task) | |
|
66 | 66 | return t |
|
67 | 67 | |
|
68 | 68 | except socket.error as e: |
|
69 | 69 | if isinstance(e, IOError) and e.errno == 111: |
|
70 | 70 | log.debug('Unable to connect to celeryd. Sync execution') |
|
71 | 71 | CELERY_ON = False |
|
72 | 72 | else: |
|
73 | 73 | log.error(traceback.format_exc()) |
|
74 | 74 | except KeyError as e: |
|
75 | 75 | log.debug('Unable to connect to celeryd. Sync execution') |
|
76 | 76 | except Exception as e: |
|
77 | 77 | log.error(traceback.format_exc()) |
|
78 | 78 | |
|
79 |
log.debug('executing task %s in sync mode' |
|
|
79 | log.debug('executing task %s in sync mode', task) | |
|
80 | 80 | return ResultWrapper(task(*args, **kwargs)) |
|
81 | 81 | |
|
82 | 82 | |
|
83 | 83 | def __get_lockkey(func, *fargs, **fkwargs): |
|
84 | 84 | params = list(fargs) |
|
85 | 85 | params.extend(['%s-%s' % ar for ar in fkwargs.items()]) |
|
86 | 86 | |
|
87 | 87 | func_name = str(func.__name__) if hasattr(func, '__name__') else str(func) |
|
88 | 88 | |
|
89 | 89 | lockkey = 'task_%s.lock' % \ |
|
90 | 90 | md5(func_name + '-' + '-'.join(map(safe_str, params))).hexdigest() |
|
91 | 91 | return lockkey |
|
92 | 92 | |
|
93 | 93 | |
|
94 | 94 | def locked_task(func): |
|
95 | 95 | def __wrapper(func, *fargs, **fkwargs): |
|
96 | 96 | lockkey = __get_lockkey(func, *fargs, **fkwargs) |
|
97 | 97 | lockkey_path = config['app_conf']['cache_dir'] |
|
98 | 98 | |
|
99 |
log.info('running task with lockkey %s' |
|
|
99 | log.info('running task with lockkey %s', lockkey) | |
|
100 | 100 | try: |
|
101 | 101 | l = DaemonLock(file_=jn(lockkey_path, lockkey)) |
|
102 | 102 | ret = func(*fargs, **fkwargs) |
|
103 | 103 | l.release() |
|
104 | 104 | return ret |
|
105 | 105 | except LockHeld: |
|
106 | 106 | log.info('LockHeld') |
|
107 | 107 | return 'Task with key %s already running' % lockkey |
|
108 | 108 | |
|
109 | 109 | return decorator(__wrapper, func) |
|
110 | 110 | |
|
111 | 111 | |
|
112 | 112 | def get_session(): |
|
113 | 113 | if CELERY_ON: |
|
114 | 114 | engine = engine_from_config(config, 'sqlalchemy.db1.') |
|
115 | 115 | init_model(engine) |
|
116 | 116 | sa = meta.Session() |
|
117 | 117 | return sa |
|
118 | 118 | |
|
119 | 119 | |
|
120 | 120 | def dbsession(func): |
|
121 | 121 | def __wrapper(func, *fargs, **fkwargs): |
|
122 | 122 | try: |
|
123 | 123 | ret = func(*fargs, **fkwargs) |
|
124 | 124 | return ret |
|
125 | 125 | finally: |
|
126 | 126 | if CELERY_ON and not CELERY_EAGER: |
|
127 | 127 | meta.Session.remove() |
|
128 | 128 | |
|
129 | 129 | return decorator(__wrapper, func) |
@@ -1,501 +1,501 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.celerylib.tasks |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Kallithea task modules, containing all task that suppose to be run |
|
19 | 19 | by celery daemon |
|
20 | 20 | |
|
21 | 21 | This file was forked by the Kallithea project in July 2014. |
|
22 | 22 | Original author and date, and relevant copyright and licensing information is below: |
|
23 | 23 | :created_on: Oct 6, 2010 |
|
24 | 24 | :author: marcink |
|
25 | 25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
26 | 26 | :license: GPLv3, see LICENSE.md for more details. |
|
27 | 27 | """ |
|
28 | 28 | |
|
29 | 29 | from celery.decorators import task |
|
30 | 30 | |
|
31 | 31 | import os |
|
32 | 32 | import traceback |
|
33 | 33 | import logging |
|
34 | 34 | from os.path import join as jn |
|
35 | 35 | |
|
36 | 36 | from time import mktime |
|
37 | 37 | from operator import itemgetter |
|
38 | 38 | from string import lower |
|
39 | 39 | |
|
40 | 40 | from pylons import config |
|
41 | 41 | |
|
42 | 42 | from kallithea import CELERY_ON |
|
43 | 43 | from kallithea.lib.celerylib import run_task, locked_task, dbsession, \ |
|
44 | 44 | str2bool, __get_lockkey, LockHeld, DaemonLock, get_session |
|
45 | 45 | from kallithea.lib.helpers import person |
|
46 | 46 | from kallithea.lib.rcmail.smtp_mailer import SmtpMailer |
|
47 | 47 | from kallithea.lib.utils import add_cache, action_logger |
|
48 | 48 | from kallithea.lib.compat import json, OrderedDict |
|
49 | 49 | from kallithea.lib.hooks import log_create_repository |
|
50 | 50 | |
|
51 | 51 | from kallithea.model.db import Statistics, Repository, User |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | add_cache(config) # pragma: no cover |
|
55 | 55 | |
|
56 | 56 | __all__ = ['whoosh_index', 'get_commits_stats', 'send_email'] |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | def get_logger(cls): |
|
60 | 60 | if CELERY_ON: |
|
61 | 61 | try: |
|
62 | 62 | return cls.get_logger() |
|
63 | 63 | except AttributeError: |
|
64 | 64 | pass |
|
65 | 65 | return logging.getLogger(__name__) |
|
66 | 66 | |
|
67 | 67 | |
|
68 | 68 | @task(ignore_result=True) |
|
69 | 69 | @locked_task |
|
70 | 70 | @dbsession |
|
71 | 71 | def whoosh_index(repo_location, full_index): |
|
72 | 72 | from kallithea.lib.indexers.daemon import WhooshIndexingDaemon |
|
73 | 73 | DBS = get_session() |
|
74 | 74 | |
|
75 | 75 | index_location = config['index_dir'] |
|
76 | 76 | WhooshIndexingDaemon(index_location=index_location, |
|
77 | 77 | repo_location=repo_location, sa=DBS)\ |
|
78 | 78 | .run(full_index=full_index) |
|
79 | 79 | |
|
80 | 80 | |
|
81 | 81 | @task(ignore_result=True) |
|
82 | 82 | @dbsession |
|
83 | 83 | def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100): |
|
84 | 84 | log = get_logger(get_commits_stats) |
|
85 | 85 | DBS = get_session() |
|
86 | 86 | lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y, |
|
87 | 87 | ts_max_y) |
|
88 | 88 | lockkey_path = config['app_conf']['cache_dir'] |
|
89 | 89 | |
|
90 |
log.info('running task with lockkey %s' |
|
|
90 | log.info('running task with lockkey %s', lockkey) | |
|
91 | 91 | |
|
92 | 92 | try: |
|
93 | 93 | lock = l = DaemonLock(file_=jn(lockkey_path, lockkey)) |
|
94 | 94 | |
|
95 | 95 | # for js data compatibility cleans the key for person from ' |
|
96 | 96 | akc = lambda k: person(k).replace('"', "") |
|
97 | 97 | |
|
98 | 98 | co_day_auth_aggr = {} |
|
99 | 99 | commits_by_day_aggregate = {} |
|
100 | 100 | repo = Repository.get_by_repo_name(repo_name) |
|
101 | 101 | if repo is None: |
|
102 | 102 | return True |
|
103 | 103 | |
|
104 | 104 | repo = repo.scm_instance |
|
105 | 105 | repo_size = repo.count() |
|
106 | 106 | # return if repo have no revisions |
|
107 | 107 | if repo_size < 1: |
|
108 | 108 | lock.release() |
|
109 | 109 | return True |
|
110 | 110 | |
|
111 | 111 | skip_date_limit = True |
|
112 | 112 | parse_limit = int(config['app_conf'].get('commit_parse_limit')) |
|
113 | 113 | last_rev = None |
|
114 | 114 | last_cs = None |
|
115 | 115 | timegetter = itemgetter('time') |
|
116 | 116 | |
|
117 | 117 | dbrepo = DBS.query(Repository)\ |
|
118 | 118 | .filter(Repository.repo_name == repo_name).scalar() |
|
119 | 119 | cur_stats = DBS.query(Statistics)\ |
|
120 | 120 | .filter(Statistics.repository == dbrepo).scalar() |
|
121 | 121 | |
|
122 | 122 | if cur_stats is not None: |
|
123 | 123 | last_rev = cur_stats.stat_on_revision |
|
124 | 124 | |
|
125 | 125 | if last_rev == repo.get_changeset().revision and repo_size > 1: |
|
126 | 126 | # pass silently without any work if we're not on first revision or |
|
127 | 127 | # current state of parsing revision(from db marker) is the |
|
128 | 128 | # last revision |
|
129 | 129 | lock.release() |
|
130 | 130 | return True |
|
131 | 131 | |
|
132 | 132 | if cur_stats: |
|
133 | 133 | commits_by_day_aggregate = OrderedDict(json.loads( |
|
134 | 134 | cur_stats.commit_activity_combined)) |
|
135 | 135 | co_day_auth_aggr = json.loads(cur_stats.commit_activity) |
|
136 | 136 | |
|
137 |
log.debug('starting parsing %s' |
|
|
137 | log.debug('starting parsing %s', parse_limit) | |
|
138 | 138 | lmktime = mktime |
|
139 | 139 | |
|
140 | 140 | last_rev = last_rev + 1 if last_rev >= 0 else 0 |
|
141 |
log.debug('Getting revisions from %s to %s' |
|
|
142 |
last_rev, last_rev + parse_limit |
|
|
141 | log.debug('Getting revisions from %s to %s', | |
|
142 | last_rev, last_rev + parse_limit | |
|
143 | 143 | ) |
|
144 | 144 | for cs in repo[last_rev:last_rev + parse_limit]: |
|
145 |
log.debug('parsing %s' |
|
|
145 | log.debug('parsing %s', cs) | |
|
146 | 146 | last_cs = cs # remember last parsed changeset |
|
147 | 147 | k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1], |
|
148 | 148 | cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0]) |
|
149 | 149 | |
|
150 | 150 | if akc(cs.author) in co_day_auth_aggr: |
|
151 | 151 | try: |
|
152 | 152 | l = [timegetter(x) for x in |
|
153 | 153 | co_day_auth_aggr[akc(cs.author)]['data']] |
|
154 | 154 | time_pos = l.index(k) |
|
155 | 155 | except ValueError: |
|
156 | 156 | time_pos = None |
|
157 | 157 | |
|
158 | 158 | if time_pos >= 0 and time_pos is not None: |
|
159 | 159 | |
|
160 | 160 | datadict = \ |
|
161 | 161 | co_day_auth_aggr[akc(cs.author)]['data'][time_pos] |
|
162 | 162 | |
|
163 | 163 | datadict["commits"] += 1 |
|
164 | 164 | datadict["added"] += len(cs.added) |
|
165 | 165 | datadict["changed"] += len(cs.changed) |
|
166 | 166 | datadict["removed"] += len(cs.removed) |
|
167 | 167 | |
|
168 | 168 | else: |
|
169 | 169 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: |
|
170 | 170 | |
|
171 | 171 | datadict = {"time": k, |
|
172 | 172 | "commits": 1, |
|
173 | 173 | "added": len(cs.added), |
|
174 | 174 | "changed": len(cs.changed), |
|
175 | 175 | "removed": len(cs.removed), |
|
176 | 176 | } |
|
177 | 177 | co_day_auth_aggr[akc(cs.author)]['data']\ |
|
178 | 178 | .append(datadict) |
|
179 | 179 | |
|
180 | 180 | else: |
|
181 | 181 | if k >= ts_min_y and k <= ts_max_y or skip_date_limit: |
|
182 | 182 | co_day_auth_aggr[akc(cs.author)] = { |
|
183 | 183 | "label": akc(cs.author), |
|
184 | 184 | "data": [{"time":k, |
|
185 | 185 | "commits":1, |
|
186 | 186 | "added":len(cs.added), |
|
187 | 187 | "changed":len(cs.changed), |
|
188 | 188 | "removed":len(cs.removed), |
|
189 | 189 | }], |
|
190 | 190 | "schema": ["commits"], |
|
191 | 191 | } |
|
192 | 192 | |
|
193 | 193 | #gather all data by day |
|
194 | 194 | if k in commits_by_day_aggregate: |
|
195 | 195 | commits_by_day_aggregate[k] += 1 |
|
196 | 196 | else: |
|
197 | 197 | commits_by_day_aggregate[k] = 1 |
|
198 | 198 | |
|
199 | 199 | overview_data = sorted(commits_by_day_aggregate.items(), |
|
200 | 200 | key=itemgetter(0)) |
|
201 | 201 | |
|
202 | 202 | if not co_day_auth_aggr: |
|
203 | 203 | co_day_auth_aggr[akc(repo.contact)] = { |
|
204 | 204 | "label": akc(repo.contact), |
|
205 | 205 | "data": [0, 1], |
|
206 | 206 | "schema": ["commits"], |
|
207 | 207 | } |
|
208 | 208 | |
|
209 | 209 | stats = cur_stats if cur_stats else Statistics() |
|
210 | 210 | stats.commit_activity = json.dumps(co_day_auth_aggr) |
|
211 | 211 | stats.commit_activity_combined = json.dumps(overview_data) |
|
212 | 212 | |
|
213 |
log.debug('last revision %s' |
|
|
213 | log.debug('last revision %s', last_rev) | |
|
214 | 214 | leftovers = len(repo.revisions[last_rev:]) |
|
215 |
log.debug('revisions to parse %s' |
|
|
215 | log.debug('revisions to parse %s', leftovers) | |
|
216 | 216 | |
|
217 | 217 | if last_rev == 0 or leftovers < parse_limit: |
|
218 | 218 | log.debug('getting code trending stats') |
|
219 | 219 | stats.languages = json.dumps(__get_codes_stats(repo_name)) |
|
220 | 220 | |
|
221 | 221 | try: |
|
222 | 222 | stats.repository = dbrepo |
|
223 | 223 | stats.stat_on_revision = last_cs.revision if last_cs else 0 |
|
224 | 224 | DBS.add(stats) |
|
225 | 225 | DBS.commit() |
|
226 | 226 | except: |
|
227 | 227 | log.error(traceback.format_exc()) |
|
228 | 228 | DBS.rollback() |
|
229 | 229 | lock.release() |
|
230 | 230 | return False |
|
231 | 231 | |
|
232 | 232 | # final release |
|
233 | 233 | lock.release() |
|
234 | 234 | |
|
235 | 235 | # execute another task if celery is enabled |
|
236 | 236 | if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0: |
|
237 | 237 | recurse_limit -= 1 |
|
238 | 238 | run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y, |
|
239 | 239 | recurse_limit) |
|
240 | 240 | if recurse_limit <= 0: |
|
241 | 241 | log.debug('Breaking recursive mode due to reach of recurse limit') |
|
242 | 242 | return True |
|
243 | 243 | except LockHeld: |
|
244 | 244 | log.info('LockHeld') |
|
245 | 245 | return 'Task with key %s already running' % lockkey |
|
246 | 246 | |
|
247 | 247 | |
|
248 | 248 | @task(ignore_result=True) |
|
249 | 249 | @dbsession |
|
250 | 250 | def send_email(recipients, subject, body='', html_body='', headers=None): |
|
251 | 251 | """ |
|
252 | 252 | Sends an email with defined parameters from the .ini files. |
|
253 | 253 | |
|
254 | 254 | :param recipients: list of recipients, if this is None, the defined email |
|
255 | 255 | address from field 'email_to' and all admins is used instead |
|
256 | 256 | :param subject: subject of the mail |
|
257 | 257 | :param body: body of the mail |
|
258 | 258 | :param html_body: html version of body |
|
259 | 259 | """ |
|
260 | 260 | log = get_logger(send_email) |
|
261 | 261 | assert isinstance(recipients, list), recipients |
|
262 | 262 | |
|
263 | 263 | email_config = config |
|
264 | 264 | email_prefix = email_config.get('email_prefix', '') |
|
265 | 265 | if email_prefix: |
|
266 | 266 | subject = "%s %s" % (email_prefix, subject) |
|
267 | 267 | |
|
268 | 268 | if not recipients: |
|
269 | 269 | # if recipients are not defined we send to email_config + all admins |
|
270 | 270 | recipients = [u.email for u in User.query() |
|
271 | 271 | .filter(User.admin == True).all()] |
|
272 | 272 | if email_config.get('email_to') is not None: |
|
273 | 273 | recipients += [email_config.get('email_to')] |
|
274 | 274 | |
|
275 | 275 | # If there are still no recipients, there are no admins and no address |
|
276 | 276 | # configured in email_to, so return. |
|
277 | 277 | if not recipients: |
|
278 | 278 | log.error("No recipients specified and no fallback available.") |
|
279 | 279 | return False |
|
280 | 280 | |
|
281 | 281 | log.warning("No recipients specified for '%s' - sending to admins %s", subject, ' '.join(recipients)) |
|
282 | 282 | |
|
283 | 283 | mail_from = email_config.get('app_email_from', 'Kallithea') |
|
284 | 284 | user = email_config.get('smtp_username') |
|
285 | 285 | passwd = email_config.get('smtp_password') |
|
286 | 286 | mail_server = email_config.get('smtp_server') |
|
287 | 287 | mail_port = email_config.get('smtp_port') |
|
288 | 288 | tls = str2bool(email_config.get('smtp_use_tls')) |
|
289 | 289 | ssl = str2bool(email_config.get('smtp_use_ssl')) |
|
290 | 290 | debug = str2bool(email_config.get('debug')) |
|
291 | 291 | smtp_auth = email_config.get('smtp_auth') |
|
292 | 292 | |
|
293 | 293 | if not mail_server: |
|
294 | 294 | log.error("SMTP mail server not configured - cannot send mail '%s' to %s", subject, ' '.join(recipients)) |
|
295 | 295 | log.warning("body:\n%s", body) |
|
296 | 296 | log.warning("html:\n%s", html_body) |
|
297 | 297 | return False |
|
298 | 298 | |
|
299 | 299 | try: |
|
300 | 300 | m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth, |
|
301 | 301 | mail_port, ssl, tls, debug=debug) |
|
302 | 302 | m.send(recipients, subject, body, html_body, headers=headers) |
|
303 | 303 | except: |
|
304 | 304 | log.error('Mail sending failed') |
|
305 | 305 | log.error(traceback.format_exc()) |
|
306 | 306 | return False |
|
307 | 307 | return True |
|
308 | 308 | |
|
309 | 309 | @task(ignore_result=False) |
|
310 | 310 | @dbsession |
|
311 | 311 | def create_repo(form_data, cur_user): |
|
312 | 312 | from kallithea.model.repo import RepoModel |
|
313 | 313 | from kallithea.model.user import UserModel |
|
314 | 314 | from kallithea.model.db import Setting |
|
315 | 315 | |
|
316 | 316 | log = get_logger(create_repo) |
|
317 | 317 | DBS = get_session() |
|
318 | 318 | |
|
319 | 319 | cur_user = UserModel(DBS)._get_user(cur_user) |
|
320 | 320 | |
|
321 | 321 | owner = cur_user |
|
322 | 322 | repo_name = form_data['repo_name'] |
|
323 | 323 | repo_name_full = form_data['repo_name_full'] |
|
324 | 324 | repo_type = form_data['repo_type'] |
|
325 | 325 | description = form_data['repo_description'] |
|
326 | 326 | private = form_data['repo_private'] |
|
327 | 327 | clone_uri = form_data.get('clone_uri') |
|
328 | 328 | repo_group = form_data['repo_group'] |
|
329 | 329 | landing_rev = form_data['repo_landing_rev'] |
|
330 | 330 | copy_fork_permissions = form_data.get('copy_permissions') |
|
331 | 331 | copy_group_permissions = form_data.get('repo_copy_permissions') |
|
332 | 332 | fork_of = form_data.get('fork_parent_id') |
|
333 | 333 | state = form_data.get('repo_state', Repository.STATE_PENDING) |
|
334 | 334 | |
|
335 | 335 | # repo creation defaults, private and repo_type are filled in form |
|
336 | 336 | defs = Setting.get_default_repo_settings(strip_prefix=True) |
|
337 | 337 | enable_statistics = defs.get('repo_enable_statistics') |
|
338 | 338 | enable_locking = defs.get('repo_enable_locking') |
|
339 | 339 | enable_downloads = defs.get('repo_enable_downloads') |
|
340 | 340 | |
|
341 | 341 | try: |
|
342 | 342 | repo = RepoModel(DBS)._create_repo( |
|
343 | 343 | repo_name=repo_name_full, |
|
344 | 344 | repo_type=repo_type, |
|
345 | 345 | description=description, |
|
346 | 346 | owner=owner, |
|
347 | 347 | private=private, |
|
348 | 348 | clone_uri=clone_uri, |
|
349 | 349 | repo_group=repo_group, |
|
350 | 350 | landing_rev=landing_rev, |
|
351 | 351 | fork_of=fork_of, |
|
352 | 352 | copy_fork_permissions=copy_fork_permissions, |
|
353 | 353 | copy_group_permissions=copy_group_permissions, |
|
354 | 354 | enable_statistics=enable_statistics, |
|
355 | 355 | enable_locking=enable_locking, |
|
356 | 356 | enable_downloads=enable_downloads, |
|
357 | 357 | state=state |
|
358 | 358 | ) |
|
359 | 359 | |
|
360 | 360 | action_logger(cur_user, 'user_created_repo', |
|
361 | 361 | form_data['repo_name_full'], '', DBS) |
|
362 | 362 | |
|
363 | 363 | DBS.commit() |
|
364 | 364 | # now create this repo on Filesystem |
|
365 | 365 | RepoModel(DBS)._create_filesystem_repo( |
|
366 | 366 | repo_name=repo_name, |
|
367 | 367 | repo_type=repo_type, |
|
368 | 368 | repo_group=RepoModel(DBS)._get_repo_group(repo_group), |
|
369 | 369 | clone_uri=clone_uri, |
|
370 | 370 | ) |
|
371 | 371 | repo = Repository.get_by_repo_name(repo_name_full) |
|
372 | 372 | log_create_repository(repo.get_dict(), created_by=owner.username) |
|
373 | 373 | |
|
374 | 374 | # update repo changeset caches initially |
|
375 | 375 | repo.update_changeset_cache() |
|
376 | 376 | |
|
377 | 377 | # set new created state |
|
378 | 378 | repo.set_state(Repository.STATE_CREATED) |
|
379 | 379 | DBS.commit() |
|
380 | 380 | except Exception as e: |
|
381 | 381 | log.warning('Exception %s occurred when forking repository, ' |
|
382 | 382 | 'doing cleanup...' % e) |
|
383 | 383 | # rollback things manually ! |
|
384 | 384 | repo = Repository.get_by_repo_name(repo_name_full) |
|
385 | 385 | if repo: |
|
386 | 386 | Repository.delete(repo.repo_id) |
|
387 | 387 | DBS.commit() |
|
388 | 388 | RepoModel(DBS)._delete_filesystem_repo(repo) |
|
389 | 389 | raise |
|
390 | 390 | |
|
391 | 391 | # it's an odd fix to make celery fail task when exception occurs |
|
392 | 392 | def on_failure(self, *args, **kwargs): |
|
393 | 393 | pass |
|
394 | 394 | |
|
395 | 395 | return True |
|
396 | 396 | |
|
397 | 397 | |
|
398 | 398 | @task(ignore_result=False) |
|
399 | 399 | @dbsession |
|
400 | 400 | def create_repo_fork(form_data, cur_user): |
|
401 | 401 | """ |
|
402 | 402 | Creates a fork of repository using interval VCS methods |
|
403 | 403 | |
|
404 | 404 | :param form_data: |
|
405 | 405 | :param cur_user: |
|
406 | 406 | """ |
|
407 | 407 | from kallithea.model.repo import RepoModel |
|
408 | 408 | from kallithea.model.user import UserModel |
|
409 | 409 | |
|
410 | 410 | log = get_logger(create_repo_fork) |
|
411 | 411 | DBS = get_session() |
|
412 | 412 | |
|
413 | 413 | base_path = Repository.base_path() |
|
414 | 414 | cur_user = UserModel(DBS)._get_user(cur_user) |
|
415 | 415 | |
|
416 | 416 | repo_name = form_data['repo_name'] # fork in this case |
|
417 | 417 | repo_name_full = form_data['repo_name_full'] |
|
418 | 418 | |
|
419 | 419 | repo_type = form_data['repo_type'] |
|
420 | 420 | owner = cur_user |
|
421 | 421 | private = form_data['private'] |
|
422 | 422 | clone_uri = form_data.get('clone_uri') |
|
423 | 423 | repo_group = form_data['repo_group'] |
|
424 | 424 | landing_rev = form_data['landing_rev'] |
|
425 | 425 | copy_fork_permissions = form_data.get('copy_permissions') |
|
426 | 426 | |
|
427 | 427 | try: |
|
428 | 428 | fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id')) |
|
429 | 429 | |
|
430 | 430 | RepoModel(DBS)._create_repo( |
|
431 | 431 | repo_name=repo_name_full, |
|
432 | 432 | repo_type=repo_type, |
|
433 | 433 | description=form_data['description'], |
|
434 | 434 | owner=owner, |
|
435 | 435 | private=private, |
|
436 | 436 | clone_uri=clone_uri, |
|
437 | 437 | repo_group=repo_group, |
|
438 | 438 | landing_rev=landing_rev, |
|
439 | 439 | fork_of=fork_of, |
|
440 | 440 | copy_fork_permissions=copy_fork_permissions |
|
441 | 441 | ) |
|
442 | 442 | action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full, |
|
443 | 443 | fork_of.repo_name, '', DBS) |
|
444 | 444 | DBS.commit() |
|
445 | 445 | |
|
446 | 446 | update_after_clone = form_data['update_after_clone'] # FIXME - unused! |
|
447 | 447 | source_repo_path = os.path.join(base_path, fork_of.repo_name) |
|
448 | 448 | |
|
449 | 449 | # now create this repo on Filesystem |
|
450 | 450 | RepoModel(DBS)._create_filesystem_repo( |
|
451 | 451 | repo_name=repo_name, |
|
452 | 452 | repo_type=repo_type, |
|
453 | 453 | repo_group=RepoModel(DBS)._get_repo_group(repo_group), |
|
454 | 454 | clone_uri=source_repo_path, |
|
455 | 455 | ) |
|
456 | 456 | repo = Repository.get_by_repo_name(repo_name_full) |
|
457 | 457 | log_create_repository(repo.get_dict(), created_by=owner.username) |
|
458 | 458 | |
|
459 | 459 | # update repo changeset caches initially |
|
460 | 460 | repo.update_changeset_cache() |
|
461 | 461 | |
|
462 | 462 | # set new created state |
|
463 | 463 | repo.set_state(Repository.STATE_CREATED) |
|
464 | 464 | DBS.commit() |
|
465 | 465 | except Exception as e: |
|
466 | 466 | log.warning('Exception %s occurred when forking repository, ' |
|
467 | 467 | 'doing cleanup...' % e) |
|
468 | 468 | #rollback things manually ! |
|
469 | 469 | repo = Repository.get_by_repo_name(repo_name_full) |
|
470 | 470 | if repo: |
|
471 | 471 | Repository.delete(repo.repo_id) |
|
472 | 472 | DBS.commit() |
|
473 | 473 | RepoModel(DBS)._delete_filesystem_repo(repo) |
|
474 | 474 | raise |
|
475 | 475 | |
|
476 | 476 | # it's an odd fix to make celery fail task when exception occurs |
|
477 | 477 | def on_failure(self, *args, **kwargs): |
|
478 | 478 | pass |
|
479 | 479 | |
|
480 | 480 | return True |
|
481 | 481 | |
|
482 | 482 | |
|
483 | 483 | def __get_codes_stats(repo_name): |
|
484 | 484 | from kallithea.config.conf import LANGUAGES_EXTENSIONS_MAP |
|
485 | 485 | repo = Repository.get_by_repo_name(repo_name).scm_instance |
|
486 | 486 | |
|
487 | 487 | tip = repo.get_changeset() |
|
488 | 488 | code_stats = {} |
|
489 | 489 | |
|
490 | 490 | def aggregate(cs): |
|
491 | 491 | for f in cs[2]: |
|
492 | 492 | ext = lower(f.extension) |
|
493 | 493 | if ext in LANGUAGES_EXTENSIONS_MAP.keys() and not f.is_binary: |
|
494 | 494 | if ext in code_stats: |
|
495 | 495 | code_stats[ext] += 1 |
|
496 | 496 | else: |
|
497 | 497 | code_stats[ext] = 1 |
|
498 | 498 | |
|
499 | 499 | map(aggregate, tip.walk('/')) |
|
500 | 500 | |
|
501 | 501 | return code_stats or {} |
@@ -1,570 +1,570 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.db_manage |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Database creation, and setup module for Kallithea. Used for creation |
|
19 | 19 | of database as well as for migration operations |
|
20 | 20 | |
|
21 | 21 | This file was forked by the Kallithea project in July 2014. |
|
22 | 22 | Original author and date, and relevant copyright and licensing information is below: |
|
23 | 23 | :created_on: Apr 10, 2010 |
|
24 | 24 | :author: marcink |
|
25 | 25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
26 | 26 | :license: GPLv3, see LICENSE.md for more details. |
|
27 | 27 | """ |
|
28 | 28 | |
|
29 | 29 | import os |
|
30 | 30 | import sys |
|
31 | 31 | import time |
|
32 | 32 | import uuid |
|
33 | 33 | import logging |
|
34 | 34 | from os.path import dirname as dn, join as jn |
|
35 | 35 | |
|
36 | 36 | from kallithea import __dbversion__, __py_version__, EXTERN_TYPE_INTERNAL, DB_MIGRATIONS |
|
37 | 37 | from kallithea.model.user import UserModel |
|
38 | 38 | from kallithea.lib.utils import ask_ok |
|
39 | 39 | from kallithea.model import init_model |
|
40 | 40 | from kallithea.model.db import User, Permission, Ui, \ |
|
41 | 41 | Setting, UserToPerm, DbMigrateVersion, RepoGroup, \ |
|
42 | 42 | UserRepoGroupToPerm, CacheInvalidation, UserGroup, Repository |
|
43 | 43 | |
|
44 | 44 | from sqlalchemy.engine import create_engine |
|
45 | 45 | from kallithea.model.repo_group import RepoGroupModel |
|
46 | 46 | #from kallithea.model import meta |
|
47 | 47 | from kallithea.model.meta import Session, Base |
|
48 | 48 | from kallithea.model.repo import RepoModel |
|
49 | 49 | from kallithea.model.permission import PermissionModel |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | log = logging.getLogger(__name__) |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | def notify(msg): |
|
56 | 56 | """ |
|
57 | 57 | Notification for migrations messages |
|
58 | 58 | """ |
|
59 | 59 | ml = len(msg) + (4 * 2) |
|
60 | 60 | print('\n%s\n*** %s ***\n%s' % ('*' * ml, msg, '*' * ml)).upper() |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | class DbManage(object): |
|
64 | 64 | def __init__(self, log_sql, dbconf, root, tests=False, SESSION=None, cli_args={}): |
|
65 | 65 | self.dbname = dbconf.split('/')[-1] |
|
66 | 66 | self.tests = tests |
|
67 | 67 | self.root = root |
|
68 | 68 | self.dburi = dbconf |
|
69 | 69 | self.log_sql = log_sql |
|
70 | 70 | self.db_exists = False |
|
71 | 71 | self.cli_args = cli_args |
|
72 | 72 | self.init_db(SESSION=SESSION) |
|
73 | 73 | |
|
74 | 74 | force_ask = self.cli_args.get('force_ask') |
|
75 | 75 | if force_ask is not None: |
|
76 | 76 | global ask_ok |
|
77 | 77 | ask_ok = lambda *args, **kwargs: force_ask |
|
78 | 78 | |
|
79 | 79 | def init_db(self, SESSION=None): |
|
80 | 80 | if SESSION: |
|
81 | 81 | self.sa = SESSION |
|
82 | 82 | else: |
|
83 | 83 | #init new sessions |
|
84 | 84 | engine = create_engine(self.dburi, echo=self.log_sql) |
|
85 | 85 | init_model(engine) |
|
86 | 86 | self.sa = Session() |
|
87 | 87 | |
|
88 | 88 | def create_tables(self, override=False): |
|
89 | 89 | """ |
|
90 | 90 | Create a auth database |
|
91 | 91 | """ |
|
92 | 92 | |
|
93 | 93 | log.info("Any existing database is going to be destroyed") |
|
94 | 94 | if self.tests: |
|
95 | 95 | destroy = True |
|
96 | 96 | else: |
|
97 | 97 | destroy = ask_ok('Are you sure to destroy old database ? [y/n]') |
|
98 | 98 | if not destroy: |
|
99 | 99 | print 'Nothing done.' |
|
100 | 100 | sys.exit(0) |
|
101 | 101 | if destroy: |
|
102 | 102 | Base.metadata.drop_all() |
|
103 | 103 | |
|
104 | 104 | checkfirst = not override |
|
105 | 105 | Base.metadata.create_all(checkfirst=checkfirst) |
|
106 |
log.info('Created tables for %s' |
|
|
106 | log.info('Created tables for %s', self.dbname) | |
|
107 | 107 | |
|
108 | 108 | def set_db_version(self): |
|
109 | 109 | ver = DbMigrateVersion() |
|
110 | 110 | ver.version = __dbversion__ |
|
111 | 111 | ver.repository_id = DB_MIGRATIONS |
|
112 | 112 | ver.repository_path = 'versions' |
|
113 | 113 | self.sa.add(ver) |
|
114 |
log.info('db version set to: %s' |
|
|
114 | log.info('db version set to: %s', __dbversion__) | |
|
115 | 115 | |
|
116 | 116 | def upgrade(self): |
|
117 | 117 | """ |
|
118 | 118 | Upgrades given database schema to given revision following |
|
119 | 119 | all needed steps, to perform the upgrade |
|
120 | 120 | |
|
121 | 121 | """ |
|
122 | 122 | |
|
123 | 123 | from kallithea.lib.dbmigrate.migrate.versioning import api |
|
124 | 124 | from kallithea.lib.dbmigrate.migrate.exceptions import \ |
|
125 | 125 | DatabaseNotControlledError |
|
126 | 126 | |
|
127 | 127 | if 'sqlite' in self.dburi: |
|
128 | 128 | print ( |
|
129 | 129 | '********************** WARNING **********************\n' |
|
130 | 130 | 'Make sure your version of sqlite is at least 3.7.X. \n' |
|
131 | 131 | 'Earlier versions are known to fail on some migrations\n' |
|
132 | 132 | '*****************************************************\n') |
|
133 | 133 | |
|
134 | 134 | upgrade = ask_ok('You are about to perform database upgrade, make ' |
|
135 | 135 | 'sure You backed up your database before. ' |
|
136 | 136 | 'Continue ? [y/n]') |
|
137 | 137 | if not upgrade: |
|
138 | 138 | print 'No upgrade performed' |
|
139 | 139 | sys.exit(0) |
|
140 | 140 | |
|
141 | 141 | repository_path = jn(dn(dn(dn(os.path.realpath(__file__)))), |
|
142 | 142 | 'kallithea/lib/dbmigrate') |
|
143 | 143 | db_uri = self.dburi |
|
144 | 144 | |
|
145 | 145 | try: |
|
146 | 146 | curr_version = api.db_version(db_uri, repository_path) |
|
147 | 147 | msg = ('Found current database under version ' |
|
148 | 148 | 'control with version %s' % curr_version) |
|
149 | 149 | |
|
150 | 150 | except (RuntimeError, DatabaseNotControlledError): |
|
151 | 151 | curr_version = 1 |
|
152 | 152 | msg = ('Current database is not under version control. Setting ' |
|
153 | 153 | 'as version %s' % curr_version) |
|
154 | 154 | api.version_control(db_uri, repository_path, curr_version) |
|
155 | 155 | |
|
156 | 156 | notify(msg) |
|
157 | 157 | if curr_version == __dbversion__: |
|
158 | 158 | print 'This database is already at the newest version' |
|
159 | 159 | sys.exit(0) |
|
160 | 160 | |
|
161 | 161 | # clear cache keys |
|
162 | 162 | log.info("Clearing cache keys now...") |
|
163 | 163 | CacheInvalidation.clear_cache() |
|
164 | 164 | |
|
165 | 165 | upgrade_steps = range(curr_version + 1, __dbversion__ + 1) |
|
166 | 166 | notify('attempting to do database upgrade from ' |
|
167 | 167 | 'version %s to version %s' % (curr_version, __dbversion__)) |
|
168 | 168 | |
|
169 | 169 | # CALL THE PROPER ORDER OF STEPS TO PERFORM FULL UPGRADE |
|
170 | 170 | _step = None |
|
171 | 171 | for step in upgrade_steps: |
|
172 | 172 | notify('performing upgrade step %s' % step) |
|
173 | 173 | time.sleep(0.5) |
|
174 | 174 | |
|
175 | 175 | api.upgrade(db_uri, repository_path, step) |
|
176 | 176 | notify('schema upgrade for step %s completed' % (step,)) |
|
177 | 177 | |
|
178 | 178 | _step = step |
|
179 | 179 | |
|
180 | 180 | notify('upgrade to version %s successful' % _step) |
|
181 | 181 | |
|
182 | 182 | def fix_repo_paths(self): |
|
183 | 183 | """ |
|
184 | 184 | Fixes a old kallithea version path into new one without a '*' |
|
185 | 185 | """ |
|
186 | 186 | |
|
187 | 187 | paths = self.sa.query(Ui)\ |
|
188 | 188 | .filter(Ui.ui_key == '/')\ |
|
189 | 189 | .scalar() |
|
190 | 190 | |
|
191 | 191 | paths.ui_value = paths.ui_value.replace('*', '') |
|
192 | 192 | |
|
193 | 193 | self.sa.add(paths) |
|
194 | 194 | self.sa.commit() |
|
195 | 195 | |
|
196 | 196 | def fix_default_user(self): |
|
197 | 197 | """ |
|
198 | 198 | Fixes a old default user with some 'nicer' default values, |
|
199 | 199 | used mostly for anonymous access |
|
200 | 200 | """ |
|
201 | 201 | def_user = self.sa.query(User)\ |
|
202 | 202 | .filter(User.username == User.DEFAULT_USER)\ |
|
203 | 203 | .one() |
|
204 | 204 | |
|
205 | 205 | def_user.name = 'Anonymous' |
|
206 | 206 | def_user.lastname = 'User' |
|
207 | 207 | def_user.email = 'anonymous@kallithea-scm.org' |
|
208 | 208 | |
|
209 | 209 | self.sa.add(def_user) |
|
210 | 210 | self.sa.commit() |
|
211 | 211 | |
|
212 | 212 | def fix_settings(self): |
|
213 | 213 | """ |
|
214 | 214 | Fixes kallithea settings adds ga_code key for google analytics |
|
215 | 215 | """ |
|
216 | 216 | |
|
217 | 217 | hgsettings3 = Setting('ga_code', '') |
|
218 | 218 | |
|
219 | 219 | self.sa.add(hgsettings3) |
|
220 | 220 | self.sa.commit() |
|
221 | 221 | |
|
222 | 222 | def admin_prompt(self, second=False): |
|
223 | 223 | if not self.tests: |
|
224 | 224 | import getpass |
|
225 | 225 | |
|
226 | 226 | # defaults |
|
227 | 227 | defaults = self.cli_args |
|
228 | 228 | username = defaults.get('username') |
|
229 | 229 | password = defaults.get('password') |
|
230 | 230 | email = defaults.get('email') |
|
231 | 231 | |
|
232 | 232 | def get_password(): |
|
233 | 233 | password = getpass.getpass('Specify admin password ' |
|
234 | 234 | '(min 6 chars):') |
|
235 | 235 | confirm = getpass.getpass('Confirm password:') |
|
236 | 236 | |
|
237 | 237 | if password != confirm: |
|
238 | 238 | log.error('passwords mismatch') |
|
239 | 239 | return False |
|
240 | 240 | if len(password) < 6: |
|
241 | 241 | log.error('password is to short use at least 6 characters') |
|
242 | 242 | return False |
|
243 | 243 | |
|
244 | 244 | return password |
|
245 | 245 | if username is None: |
|
246 | 246 | username = raw_input('Specify admin username:') |
|
247 | 247 | if password is None: |
|
248 | 248 | password = get_password() |
|
249 | 249 | if not password: |
|
250 | 250 | #second try |
|
251 | 251 | password = get_password() |
|
252 | 252 | if not password: |
|
253 | 253 | sys.exit() |
|
254 | 254 | if email is None: |
|
255 | 255 | email = raw_input('Specify admin email:') |
|
256 | 256 | self.create_user(username, password, email, True) |
|
257 | 257 | else: |
|
258 | 258 | log.info('creating admin and regular test users') |
|
259 | 259 | from kallithea.tests import TEST_USER_ADMIN_LOGIN, \ |
|
260 | 260 | TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \ |
|
261 | 261 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \ |
|
262 | 262 | TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \ |
|
263 | 263 | TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL |
|
264 | 264 | |
|
265 | 265 | self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, |
|
266 | 266 | TEST_USER_ADMIN_EMAIL, True) |
|
267 | 267 | |
|
268 | 268 | self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, |
|
269 | 269 | TEST_USER_REGULAR_EMAIL, False) |
|
270 | 270 | |
|
271 | 271 | self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, |
|
272 | 272 | TEST_USER_REGULAR2_EMAIL, False) |
|
273 | 273 | |
|
274 | 274 | def create_ui_settings(self, repo_store_path): |
|
275 | 275 | """ |
|
276 | 276 | Creates ui settings, fills out hooks |
|
277 | 277 | """ |
|
278 | 278 | |
|
279 | 279 | #HOOKS |
|
280 | 280 | hooks1_key = Ui.HOOK_UPDATE |
|
281 | 281 | hooks1_ = self.sa.query(Ui)\ |
|
282 | 282 | .filter(Ui.ui_key == hooks1_key).scalar() |
|
283 | 283 | |
|
284 | 284 | hooks1 = Ui() if hooks1_ is None else hooks1_ |
|
285 | 285 | hooks1.ui_section = 'hooks' |
|
286 | 286 | hooks1.ui_key = hooks1_key |
|
287 | 287 | hooks1.ui_value = 'hg update >&2' |
|
288 | 288 | hooks1.ui_active = False |
|
289 | 289 | self.sa.add(hooks1) |
|
290 | 290 | |
|
291 | 291 | hooks2_key = Ui.HOOK_REPO_SIZE |
|
292 | 292 | hooks2_ = self.sa.query(Ui)\ |
|
293 | 293 | .filter(Ui.ui_key == hooks2_key).scalar() |
|
294 | 294 | hooks2 = Ui() if hooks2_ is None else hooks2_ |
|
295 | 295 | hooks2.ui_section = 'hooks' |
|
296 | 296 | hooks2.ui_key = hooks2_key |
|
297 | 297 | hooks2.ui_value = 'python:kallithea.lib.hooks.repo_size' |
|
298 | 298 | self.sa.add(hooks2) |
|
299 | 299 | |
|
300 | 300 | hooks3 = Ui() |
|
301 | 301 | hooks3.ui_section = 'hooks' |
|
302 | 302 | hooks3.ui_key = Ui.HOOK_PUSH |
|
303 | 303 | hooks3.ui_value = 'python:kallithea.lib.hooks.log_push_action' |
|
304 | 304 | self.sa.add(hooks3) |
|
305 | 305 | |
|
306 | 306 | hooks4 = Ui() |
|
307 | 307 | hooks4.ui_section = 'hooks' |
|
308 | 308 | hooks4.ui_key = Ui.HOOK_PRE_PUSH |
|
309 | 309 | hooks4.ui_value = 'python:kallithea.lib.hooks.pre_push' |
|
310 | 310 | self.sa.add(hooks4) |
|
311 | 311 | |
|
312 | 312 | hooks5 = Ui() |
|
313 | 313 | hooks5.ui_section = 'hooks' |
|
314 | 314 | hooks5.ui_key = Ui.HOOK_PULL |
|
315 | 315 | hooks5.ui_value = 'python:kallithea.lib.hooks.log_pull_action' |
|
316 | 316 | self.sa.add(hooks5) |
|
317 | 317 | |
|
318 | 318 | hooks6 = Ui() |
|
319 | 319 | hooks6.ui_section = 'hooks' |
|
320 | 320 | hooks6.ui_key = Ui.HOOK_PRE_PULL |
|
321 | 321 | hooks6.ui_value = 'python:kallithea.lib.hooks.pre_pull' |
|
322 | 322 | self.sa.add(hooks6) |
|
323 | 323 | |
|
324 | 324 | # enable largefiles |
|
325 | 325 | largefiles = Ui() |
|
326 | 326 | largefiles.ui_section = 'extensions' |
|
327 | 327 | largefiles.ui_key = 'largefiles' |
|
328 | 328 | largefiles.ui_value = '' |
|
329 | 329 | self.sa.add(largefiles) |
|
330 | 330 | |
|
331 | 331 | # set default largefiles cache dir, defaults to |
|
332 | 332 | # /repo location/.cache/largefiles |
|
333 | 333 | largefiles = Ui() |
|
334 | 334 | largefiles.ui_section = 'largefiles' |
|
335 | 335 | largefiles.ui_key = 'usercache' |
|
336 | 336 | largefiles.ui_value = os.path.join(repo_store_path, '.cache', |
|
337 | 337 | 'largefiles') |
|
338 | 338 | self.sa.add(largefiles) |
|
339 | 339 | |
|
340 | 340 | # enable hgsubversion disabled by default |
|
341 | 341 | hgsubversion = Ui() |
|
342 | 342 | hgsubversion.ui_section = 'extensions' |
|
343 | 343 | hgsubversion.ui_key = 'hgsubversion' |
|
344 | 344 | hgsubversion.ui_value = '' |
|
345 | 345 | hgsubversion.ui_active = False |
|
346 | 346 | self.sa.add(hgsubversion) |
|
347 | 347 | |
|
348 | 348 | # enable hggit disabled by default |
|
349 | 349 | hggit = Ui() |
|
350 | 350 | hggit.ui_section = 'extensions' |
|
351 | 351 | hggit.ui_key = 'hggit' |
|
352 | 352 | hggit.ui_value = '' |
|
353 | 353 | hggit.ui_active = False |
|
354 | 354 | self.sa.add(hggit) |
|
355 | 355 | |
|
356 | 356 | def create_auth_plugin_options(self, skip_existing=False): |
|
357 | 357 | """ |
|
358 | 358 | Create default auth plugin settings, and make it active |
|
359 | 359 | |
|
360 | 360 | :param skip_existing: |
|
361 | 361 | """ |
|
362 | 362 | |
|
363 | 363 | for k, v, t in [('auth_plugins', 'kallithea.lib.auth_modules.auth_internal', 'list'), |
|
364 | 364 | ('auth_internal_enabled', 'True', 'bool')]: |
|
365 | 365 | if skip_existing and Setting.get_by_name(k) != None: |
|
366 |
log.debug('Skipping option %s' |
|
|
366 | log.debug('Skipping option %s', k) | |
|
367 | 367 | continue |
|
368 | 368 | setting = Setting(k, v, t) |
|
369 | 369 | self.sa.add(setting) |
|
370 | 370 | |
|
371 | 371 | def create_default_options(self, skip_existing=False): |
|
372 | 372 | """Creates default settings""" |
|
373 | 373 | |
|
374 | 374 | for k, v, t in [ |
|
375 | 375 | ('default_repo_enable_locking', False, 'bool'), |
|
376 | 376 | ('default_repo_enable_downloads', False, 'bool'), |
|
377 | 377 | ('default_repo_enable_statistics', False, 'bool'), |
|
378 | 378 | ('default_repo_private', False, 'bool'), |
|
379 | 379 | ('default_repo_type', 'hg', 'unicode')]: |
|
380 | 380 | |
|
381 | 381 | if skip_existing and Setting.get_by_name(k) is not None: |
|
382 |
log.debug('Skipping option %s' |
|
|
382 | log.debug('Skipping option %s', k) | |
|
383 | 383 | continue |
|
384 | 384 | setting = Setting(k, v, t) |
|
385 | 385 | self.sa.add(setting) |
|
386 | 386 | |
|
387 | 387 | def fixup_groups(self): |
|
388 | 388 | def_usr = User.get_default_user() |
|
389 | 389 | for g in RepoGroup.query().all(): |
|
390 | 390 | g.group_name = g.get_new_name(g.name) |
|
391 | 391 | self.sa.add(g) |
|
392 | 392 | # get default perm |
|
393 | 393 | default = UserRepoGroupToPerm.query()\ |
|
394 | 394 | .filter(UserRepoGroupToPerm.group == g)\ |
|
395 | 395 | .filter(UserRepoGroupToPerm.user == def_usr)\ |
|
396 | 396 | .scalar() |
|
397 | 397 | |
|
398 | 398 | if default is None: |
|
399 |
log.debug('missing default permission for group %s adding' |
|
|
399 | log.debug('missing default permission for group %s adding', g) | |
|
400 | 400 | perm_obj = RepoGroupModel()._create_default_perms(g) |
|
401 | 401 | self.sa.add(perm_obj) |
|
402 | 402 | |
|
403 | 403 | def reset_permissions(self, username): |
|
404 | 404 | """ |
|
405 | 405 | Resets permissions to default state, useful when old systems had |
|
406 | 406 | bad permissions, we must clean them up |
|
407 | 407 | |
|
408 | 408 | :param username: |
|
409 | 409 | """ |
|
410 | 410 | default_user = User.get_by_username(username) |
|
411 | 411 | if not default_user: |
|
412 | 412 | return |
|
413 | 413 | |
|
414 | 414 | u2p = UserToPerm.query()\ |
|
415 | 415 | .filter(UserToPerm.user == default_user).all() |
|
416 | 416 | fixed = False |
|
417 | 417 | if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS): |
|
418 | 418 | for p in u2p: |
|
419 | 419 | Session().delete(p) |
|
420 | 420 | fixed = True |
|
421 | 421 | self.populate_default_permissions() |
|
422 | 422 | return fixed |
|
423 | 423 | |
|
424 | 424 | def update_repo_info(self): |
|
425 | 425 | RepoModel.update_repoinfo() |
|
426 | 426 | |
|
427 | 427 | def config_prompt(self, test_repo_path='', retries=3): |
|
428 | 428 | defaults = self.cli_args |
|
429 | 429 | _path = defaults.get('repos_location') |
|
430 | 430 | if retries == 3: |
|
431 | 431 | log.info('Setting up repositories config') |
|
432 | 432 | |
|
433 | 433 | if _path is not None: |
|
434 | 434 | path = _path |
|
435 | 435 | elif not self.tests and not test_repo_path: |
|
436 | 436 | path = raw_input( |
|
437 | 437 | 'Enter a valid absolute path to store repositories. ' |
|
438 | 438 | 'All repositories in that path will be added automatically:' |
|
439 | 439 | ) |
|
440 | 440 | else: |
|
441 | 441 | path = test_repo_path |
|
442 | 442 | path_ok = True |
|
443 | 443 | |
|
444 | 444 | # check proper dir |
|
445 | 445 | if not os.path.isdir(path): |
|
446 | 446 | path_ok = False |
|
447 |
log.error('Given path %s is not a valid directory' |
|
|
447 | log.error('Given path %s is not a valid directory', path) | |
|
448 | 448 | |
|
449 | 449 | elif not os.path.isabs(path): |
|
450 | 450 | path_ok = False |
|
451 |
log.error('Given path %s is not an absolute path' |
|
|
451 | log.error('Given path %s is not an absolute path', path) | |
|
452 | 452 | |
|
453 | 453 | # check if path is at least readable. |
|
454 | 454 | if not os.access(path, os.R_OK): |
|
455 | 455 | path_ok = False |
|
456 |
log.error('Given path %s is not readable' |
|
|
456 | log.error('Given path %s is not readable', path) | |
|
457 | 457 | |
|
458 | 458 | # check write access, warn user about non writeable paths |
|
459 | 459 | elif not os.access(path, os.W_OK) and path_ok: |
|
460 |
log.warning('No write permission to given path %s' |
|
|
460 | log.warning('No write permission to given path %s', path) | |
|
461 | 461 | if not ask_ok('Given path %s is not writeable, do you want to ' |
|
462 | 462 | 'continue with read only mode ? [y/n]' % (path,)): |
|
463 | 463 | log.error('Canceled by user') |
|
464 | 464 | sys.exit(-1) |
|
465 | 465 | |
|
466 | 466 | if retries == 0: |
|
467 | 467 | sys.exit('max retries reached') |
|
468 | 468 | if not path_ok: |
|
469 | 469 | retries -= 1 |
|
470 | 470 | return self.config_prompt(test_repo_path, retries) |
|
471 | 471 | |
|
472 | 472 | real_path = os.path.normpath(os.path.realpath(path)) |
|
473 | 473 | |
|
474 | 474 | if real_path != os.path.normpath(path): |
|
475 |
log.warning('Using normalized path %s instead of %s' |
|
|
475 | log.warning('Using normalized path %s instead of %s', real_path, path) | |
|
476 | 476 | |
|
477 | 477 | return real_path |
|
478 | 478 | |
|
479 | 479 | def create_settings(self, path): |
|
480 | 480 | |
|
481 | 481 | self.create_ui_settings(path) |
|
482 | 482 | |
|
483 | 483 | ui_config = [ |
|
484 | 484 | ('web', 'push_ssl', 'false'), |
|
485 | 485 | ('web', 'allow_archive', 'gz zip bz2'), |
|
486 | 486 | ('web', 'allow_push', '*'), |
|
487 | 487 | ('web', 'baseurl', '/'), |
|
488 | 488 | ('paths', '/', path), |
|
489 | 489 | #('phases', 'publish', 'false') |
|
490 | 490 | ] |
|
491 | 491 | for section, key, value in ui_config: |
|
492 | 492 | ui_conf = Ui() |
|
493 | 493 | setattr(ui_conf, 'ui_section', section) |
|
494 | 494 | setattr(ui_conf, 'ui_key', key) |
|
495 | 495 | setattr(ui_conf, 'ui_value', value) |
|
496 | 496 | self.sa.add(ui_conf) |
|
497 | 497 | |
|
498 | 498 | settings = [ |
|
499 | 499 | ('realm', 'Kallithea', 'unicode'), |
|
500 | 500 | ('title', '', 'unicode'), |
|
501 | 501 | ('ga_code', '', 'unicode'), |
|
502 | 502 | ('show_public_icon', True, 'bool'), |
|
503 | 503 | ('show_private_icon', True, 'bool'), |
|
504 | 504 | ('stylify_metatags', False, 'bool'), |
|
505 | 505 | ('dashboard_items', 100, 'int'), |
|
506 | 506 | ('admin_grid_items', 25, 'int'), |
|
507 | 507 | ('show_version', True, 'bool'), |
|
508 | 508 | ('use_gravatar', True, 'bool'), |
|
509 | 509 | ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'), |
|
510 | 510 | ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'), |
|
511 | 511 | ('update_url', Setting.DEFAULT_UPDATE_URL, 'unicode'), |
|
512 | 512 | ] |
|
513 | 513 | for key, val, type_ in settings: |
|
514 | 514 | sett = Setting(key, val, type_) |
|
515 | 515 | self.sa.add(sett) |
|
516 | 516 | |
|
517 | 517 | self.create_auth_plugin_options() |
|
518 | 518 | self.create_default_options() |
|
519 | 519 | |
|
520 | 520 | log.info('created ui config') |
|
521 | 521 | |
|
522 | 522 | def create_user(self, username, password, email='', admin=False): |
|
523 |
log.info('creating user %s' |
|
|
523 | log.info('creating user %s', username) | |
|
524 | 524 | UserModel().create_or_update(username, password, email, |
|
525 | 525 | firstname='Kallithea', lastname='Admin', |
|
526 | 526 | active=True, admin=admin, |
|
527 | 527 | extern_type=EXTERN_TYPE_INTERNAL) |
|
528 | 528 | |
|
529 | 529 | def create_default_user(self): |
|
530 | 530 | log.info('creating default user') |
|
531 | 531 | # create default user for handling default permissions. |
|
532 | 532 | user = UserModel().create_or_update(username=User.DEFAULT_USER, |
|
533 | 533 | password=str(uuid.uuid1())[:20], |
|
534 | 534 | email='anonymous@kallithea-scm.org', |
|
535 | 535 | firstname='Anonymous', |
|
536 | 536 | lastname='User') |
|
537 | 537 | # based on configuration options activate/deactivate this user which |
|
538 | 538 | # controls anonymous access |
|
539 | 539 | if self.cli_args.get('public_access') is False: |
|
540 | 540 | log.info('Public access disabled') |
|
541 | 541 | user.active = False |
|
542 | 542 | Session().add(user) |
|
543 | 543 | Session().commit() |
|
544 | 544 | |
|
545 | 545 | def create_permissions(self): |
|
546 | 546 | """ |
|
547 | 547 | Creates all permissions defined in the system |
|
548 | 548 | """ |
|
549 | 549 | # module.(access|create|change|delete)_[name] |
|
550 | 550 | # module.(none|read|write|admin) |
|
551 | 551 | log.info('creating permissions') |
|
552 | 552 | PermissionModel(self.sa).create_permissions() |
|
553 | 553 | |
|
554 | 554 | def populate_default_permissions(self): |
|
555 | 555 | """ |
|
556 | 556 | Populate default permissions. It will create only the default |
|
557 | 557 | permissions that are missing, and not alter already defined ones |
|
558 | 558 | """ |
|
559 | 559 | log.info('creating default user permissions') |
|
560 | 560 | PermissionModel(self.sa).create_default_permissions(user=User.DEFAULT_USER) |
|
561 | 561 | |
|
562 | 562 | @staticmethod |
|
563 | 563 | def check_waitress(): |
|
564 | 564 | """ |
|
565 | 565 | Function executed at the end of setup |
|
566 | 566 | """ |
|
567 | 567 | if not __py_version__ >= (2, 6): |
|
568 | 568 | notify('Python2.5 detected, please switch ' |
|
569 | 569 | 'egg:waitress#main -> egg:Paste#http ' |
|
570 | 570 | 'in your .ini file') |
@@ -1,100 +1,100 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | Script to migrate repository from sqlalchemy <= 0.4.4 to the new |
|
3 | 3 | repository schema. This shouldn't use any other migrate modules, so |
|
4 | 4 | that it can work in any version. |
|
5 | 5 | """ |
|
6 | 6 | |
|
7 | 7 | import os |
|
8 | 8 | import sys |
|
9 | 9 | import logging |
|
10 | 10 | |
|
11 | 11 | log = logging.getLogger(__name__) |
|
12 | 12 | |
|
13 | 13 | |
|
14 | 14 | def usage(): |
|
15 | 15 | """Gives usage information.""" |
|
16 | 16 | print """Usage: %(prog)s repository-to-migrate |
|
17 | 17 | |
|
18 | 18 | Upgrade your repository to the new flat format. |
|
19 | 19 | |
|
20 | 20 | NOTE: You should probably make a backup before running this. |
|
21 | 21 | """ % {'prog': sys.argv[0]} |
|
22 | 22 | |
|
23 | 23 | sys.exit(1) |
|
24 | 24 | |
|
25 | 25 | |
|
26 | 26 | def delete_file(filepath): |
|
27 | 27 | """Deletes a file and prints a message.""" |
|
28 |
log.info('Deleting file: %s' |
|
|
28 | log.info('Deleting file: %s', filepath) | |
|
29 | 29 | os.remove(filepath) |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | def move_file(src, tgt): |
|
33 | 33 | """Moves a file and prints a message.""" |
|
34 |
log.info('Moving file %s to %s' |
|
|
34 | log.info('Moving file %s to %s', src, tgt) | |
|
35 | 35 | if os.path.exists(tgt): |
|
36 | 36 | raise Exception( |
|
37 | 37 | 'Cannot move file %s because target %s already exists' % \ |
|
38 | 38 | (src, tgt)) |
|
39 | 39 | os.rename(src, tgt) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | def delete_directory(dirpath): |
|
43 | 43 | """Delete a directory and print a message.""" |
|
44 |
log.info('Deleting directory: %s' |
|
|
44 | log.info('Deleting directory: %s', dirpath) | |
|
45 | 45 | os.rmdir(dirpath) |
|
46 | 46 | |
|
47 | 47 | |
|
48 | 48 | def migrate_repository(repos): |
|
49 | 49 | """Does the actual migration to the new repository format.""" |
|
50 |
log.info('Migrating repository at: %s to new format' |
|
|
50 | log.info('Migrating repository at: %s to new format', repos) | |
|
51 | 51 | versions = '%s/versions' % repos |
|
52 | 52 | dirs = os.listdir(versions) |
|
53 | 53 | # Only use int's in list. |
|
54 | 54 | numdirs = [int(dirname) for dirname in dirs if dirname.isdigit()] |
|
55 | 55 | numdirs.sort() # Sort list. |
|
56 | 56 | for dirname in numdirs: |
|
57 | 57 | origdir = '%s/%s' % (versions, dirname) |
|
58 |
log.info('Working on directory: %s' |
|
|
58 | log.info('Working on directory: %s', origdir) | |
|
59 | 59 | files = os.listdir(origdir) |
|
60 | 60 | files.sort() |
|
61 | 61 | for filename in files: |
|
62 | 62 | # Delete compiled Python files. |
|
63 | 63 | if filename.endswith('.pyc') or filename.endswith('.pyo'): |
|
64 | 64 | delete_file('%s/%s' % (origdir, filename)) |
|
65 | 65 | |
|
66 | 66 | # Delete empty __init__.py files. |
|
67 | 67 | origfile = '%s/__init__.py' % origdir |
|
68 | 68 | if os.path.exists(origfile) and len(open(origfile).read()) == 0: |
|
69 | 69 | delete_file(origfile) |
|
70 | 70 | |
|
71 | 71 | # Move sql upgrade scripts. |
|
72 | 72 | if filename.endswith('.sql'): |
|
73 | 73 | version, dbms, operation = filename.split('.', 3)[0:3] |
|
74 | 74 | origfile = '%s/%s' % (origdir, filename) |
|
75 | 75 | # For instance: 2.postgres.upgrade.sql -> |
|
76 | 76 | # 002_postgres_upgrade.sql |
|
77 | 77 | tgtfile = '%s/%03d_%s_%s.sql' % ( |
|
78 | 78 | versions, int(version), dbms, operation) |
|
79 | 79 | move_file(origfile, tgtfile) |
|
80 | 80 | |
|
81 | 81 | # Move Python upgrade script. |
|
82 | 82 | pyfile = '%s.py' % dirname |
|
83 | 83 | pyfilepath = '%s/%s' % (origdir, pyfile) |
|
84 | 84 | if os.path.exists(pyfilepath): |
|
85 | 85 | tgtfile = '%s/%03d.py' % (versions, int(dirname)) |
|
86 | 86 | move_file(pyfilepath, tgtfile) |
|
87 | 87 | |
|
88 | 88 | # Try to remove directory. Will fail if it's not empty. |
|
89 | 89 | delete_directory(origdir) |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | def main(): |
|
93 | 93 | """Main function to be called when using this script.""" |
|
94 | 94 | if len(sys.argv) != 2: |
|
95 | 95 | usage() |
|
96 | 96 | migrate_repository(sys.argv[1]) |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | if __name__ == '__main__': |
|
100 | 100 | main() |
@@ -1,75 +1,75 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | A path/directory class. |
|
3 | 3 | """ |
|
4 | 4 | |
|
5 | 5 | import os |
|
6 | 6 | import shutil |
|
7 | 7 | import logging |
|
8 | 8 | |
|
9 | 9 | from kallithea.lib.dbmigrate.migrate import exceptions |
|
10 | 10 | from kallithea.lib.dbmigrate.migrate.versioning.config import * |
|
11 | 11 | from kallithea.lib.dbmigrate.migrate.versioning.util import KeyedInstance |
|
12 | 12 | |
|
13 | 13 | |
|
14 | 14 | log = logging.getLogger(__name__) |
|
15 | 15 | |
|
16 | 16 | class Pathed(KeyedInstance): |
|
17 | 17 | """ |
|
18 | 18 | A class associated with a path/directory tree. |
|
19 | 19 | |
|
20 | 20 | Only one instance of this class may exist for a particular file; |
|
21 | 21 | __new__ will return an existing instance if possible |
|
22 | 22 | """ |
|
23 | 23 | parent = None |
|
24 | 24 | |
|
25 | 25 | @classmethod |
|
26 | 26 | def _key(cls, path): |
|
27 | 27 | return str(path) |
|
28 | 28 | |
|
29 | 29 | def __init__(self, path): |
|
30 | 30 | self.path = path |
|
31 | 31 | if self.__class__.parent is not None: |
|
32 | 32 | self._init_parent(path) |
|
33 | 33 | |
|
34 | 34 | def _init_parent(self, path): |
|
35 | 35 | """Try to initialize this object's parent, if it has one""" |
|
36 | 36 | parent_path = self.__class__._parent_path(path) |
|
37 | 37 | self.parent = self.__class__.parent(parent_path) |
|
38 | log.debug("Getting parent %r:%r" % (self.__class__.parent, parent_path)) | |
38 | log.debug("Getting parent %r:%r", self.__class__.parent, parent_path) | |
|
39 | 39 | self.parent._init_child(path, self) |
|
40 | 40 | |
|
41 | 41 | def _init_child(self, child, path): |
|
42 | 42 | """Run when a child of this object is initialized. |
|
43 | 43 | |
|
44 | 44 | Parameters: the child object; the path to this object (its |
|
45 | 45 | parent) |
|
46 | 46 | """ |
|
47 | 47 | |
|
48 | 48 | @classmethod |
|
49 | 49 | def _parent_path(cls, path): |
|
50 | 50 | """ |
|
51 | 51 | Fetch the path of this object's parent from this object's path. |
|
52 | 52 | """ |
|
53 | 53 | # os.path.dirname(), but strip directories like files (like |
|
54 | 54 | # unix basename) |
|
55 | 55 | # |
|
56 | 56 | # Treat directories like files... |
|
57 | 57 | if path[-1] == '/': |
|
58 | 58 | path = path[:-1] |
|
59 | 59 | ret = os.path.dirname(path) |
|
60 | 60 | return ret |
|
61 | 61 | |
|
62 | 62 | @classmethod |
|
63 | 63 | def require_notfound(cls, path): |
|
64 | 64 | """Ensures a given path does not already exist""" |
|
65 | 65 | if os.path.exists(path): |
|
66 | 66 | raise exceptions.PathFoundError(path) |
|
67 | 67 | |
|
68 | 68 | @classmethod |
|
69 | 69 | def require_found(cls, path): |
|
70 | 70 | """Ensures a given path already exists""" |
|
71 | 71 | if not os.path.exists(path): |
|
72 | 72 | raise exceptions.PathNotFoundError(path) |
|
73 | 73 | |
|
74 | 74 | def __str__(self): |
|
75 | 75 | return self.path |
@@ -1,247 +1,247 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | SQLAlchemy migrate repository management. |
|
3 | 3 | """ |
|
4 | 4 | import os |
|
5 | 5 | import shutil |
|
6 | 6 | import string |
|
7 | 7 | import logging |
|
8 | 8 | |
|
9 | 9 | from pkg_resources import resource_filename |
|
10 | 10 | from tempita import Template as TempitaTemplate |
|
11 | 11 | |
|
12 | 12 | import kallithea |
|
13 | 13 | from kallithea.lib.dbmigrate.migrate import exceptions |
|
14 | 14 | from kallithea.lib.dbmigrate.migrate.versioning import version, pathed, cfgparse |
|
15 | 15 | from kallithea.lib.dbmigrate.migrate.versioning.template import Template |
|
16 | 16 | from kallithea.lib.dbmigrate.migrate.versioning.config import * |
|
17 | 17 | |
|
18 | 18 | |
|
19 | 19 | log = logging.getLogger(__name__) |
|
20 | 20 | |
|
21 | 21 | class Changeset(dict): |
|
22 | 22 | """A collection of changes to be applied to a database. |
|
23 | 23 | |
|
24 | 24 | Changesets are bound to a repository and manage a set of |
|
25 | 25 | scripts from that repository. |
|
26 | 26 | |
|
27 | 27 | Behaves like a dict, for the most part. Keys are ordered based on step value. |
|
28 | 28 | """ |
|
29 | 29 | |
|
30 | 30 | def __init__(self, start, *changes, **k): |
|
31 | 31 | """ |
|
32 | 32 | Give a start version; step must be explicitly stated. |
|
33 | 33 | """ |
|
34 | 34 | self.step = k.pop('step', 1) |
|
35 | 35 | self.start = version.VerNum(start) |
|
36 | 36 | self.end = self.start |
|
37 | 37 | for change in changes: |
|
38 | 38 | self.add(change) |
|
39 | 39 | |
|
40 | 40 | def __iter__(self): |
|
41 | 41 | return iter(self.items()) |
|
42 | 42 | |
|
43 | 43 | def keys(self): |
|
44 | 44 | """ |
|
45 | 45 | In a series of upgrades x -> y, keys are version x. Sorted. |
|
46 | 46 | """ |
|
47 | 47 | ret = super(Changeset, self).keys() |
|
48 | 48 | # Reverse order if downgrading |
|
49 | 49 | ret.sort(reverse=(self.step < 1)) |
|
50 | 50 | return ret |
|
51 | 51 | |
|
52 | 52 | def values(self): |
|
53 | 53 | return [self[k] for k in self.keys()] |
|
54 | 54 | |
|
55 | 55 | def items(self): |
|
56 | 56 | return zip(self.keys(), self.values()) |
|
57 | 57 | |
|
58 | 58 | def add(self, change): |
|
59 | 59 | """Add new change to changeset""" |
|
60 | 60 | key = self.end |
|
61 | 61 | self.end += self.step |
|
62 | 62 | self[key] = change |
|
63 | 63 | |
|
64 | 64 | def run(self, *p, **k): |
|
65 | 65 | """Run the changeset scripts""" |
|
66 | 66 | for _version, script in self: |
|
67 | 67 | script.run(*p, **k) |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | class Repository(pathed.Pathed): |
|
71 | 71 | """A project's change script repository""" |
|
72 | 72 | |
|
73 | 73 | _config = 'migrate.cfg' |
|
74 | 74 | _versions = 'versions' |
|
75 | 75 | |
|
76 | 76 | def __init__(self, path): |
|
77 | log.debug('Loading repository %s...' % path) | |
77 | log.debug('Loading repository %s...', path) | |
|
78 | 78 | self.verify(path) |
|
79 | 79 | super(Repository, self).__init__(path) |
|
80 | 80 | self.config = cfgparse.Config(os.path.join(self.path, self._config)) |
|
81 | 81 | self.versions = version.Collection(os.path.join(self.path, |
|
82 | 82 | self._versions)) |
|
83 | log.debug('Repository %s loaded successfully' % path) | |
84 | log.debug('Config: %r' % self.config.to_dict()) | |
83 | log.debug('Repository %s loaded successfully', path) | |
84 | log.debug('Config: %r', self.config.to_dict()) | |
|
85 | 85 | |
|
86 | 86 | @classmethod |
|
87 | 87 | def verify(cls, path): |
|
88 | 88 | """ |
|
89 | 89 | Ensure the target path is a valid repository. |
|
90 | 90 | |
|
91 | 91 | :raises: :exc:`InvalidRepositoryError <migrate.exceptions.InvalidRepositoryError>` |
|
92 | 92 | """ |
|
93 | 93 | # Ensure the existence of required files |
|
94 | 94 | try: |
|
95 | 95 | cls.require_found(path) |
|
96 | 96 | cls.require_found(os.path.join(path, cls._config)) |
|
97 | 97 | cls.require_found(os.path.join(path, cls._versions)) |
|
98 | 98 | except exceptions.PathNotFoundError as e: |
|
99 | 99 | raise exceptions.InvalidRepositoryError(path) |
|
100 | 100 | |
|
101 | 101 | @classmethod |
|
102 | 102 | def prepare_config(cls, tmpl_dir, name, options=None): |
|
103 | 103 | """ |
|
104 | 104 | Prepare a project configuration file for a new project. |
|
105 | 105 | |
|
106 | 106 | :param tmpl_dir: Path to Repository template |
|
107 | 107 | :param config_file: Name of the config file in Repository template |
|
108 | 108 | :param name: Repository name |
|
109 | 109 | :type tmpl_dir: string |
|
110 | 110 | :type config_file: string |
|
111 | 111 | :type name: string |
|
112 | 112 | :returns: Populated config file |
|
113 | 113 | """ |
|
114 | 114 | if options is None: |
|
115 | 115 | options = {} |
|
116 | 116 | options.setdefault('version_table', 'migrate_version') |
|
117 | 117 | options.setdefault('repository_id', name) |
|
118 | 118 | options.setdefault('required_dbs', []) |
|
119 | 119 | options.setdefault('use_timestamp_numbering', False) |
|
120 | 120 | |
|
121 | 121 | tmpl = open(os.path.join(tmpl_dir, cls._config)).read() |
|
122 | 122 | ret = TempitaTemplate(tmpl).substitute(options) |
|
123 | 123 | |
|
124 | 124 | # cleanup |
|
125 | 125 | del options['__template_name__'] |
|
126 | 126 | |
|
127 | 127 | return ret |
|
128 | 128 | |
|
129 | 129 | @classmethod |
|
130 | 130 | def create(cls, path, name, **opts): |
|
131 | 131 | """Create a repository at a specified path""" |
|
132 | 132 | cls.require_notfound(path) |
|
133 | 133 | theme = opts.pop('templates_theme', None) |
|
134 | 134 | t_path = opts.pop('templates_path', None) |
|
135 | 135 | |
|
136 | 136 | # Create repository |
|
137 | 137 | tmpl_dir = Template(t_path).get_repository(theme=theme) |
|
138 | 138 | shutil.copytree(tmpl_dir, path) |
|
139 | 139 | |
|
140 | 140 | # Edit config defaults |
|
141 | 141 | config_text = cls.prepare_config(tmpl_dir, name, options=opts) |
|
142 | 142 | fd = open(os.path.join(path, cls._config), 'w') |
|
143 | 143 | fd.write(config_text) |
|
144 | 144 | fd.close() |
|
145 | 145 | |
|
146 | 146 | opts['repository_name'] = name |
|
147 | 147 | |
|
148 | 148 | # Create a management script |
|
149 | 149 | manager = os.path.join(path, 'manage.py') |
|
150 | 150 | Repository.create_manage_file(manager, templates_theme=theme, |
|
151 | 151 | templates_path=t_path, **opts) |
|
152 | 152 | |
|
153 | 153 | return cls(path) |
|
154 | 154 | |
|
155 | 155 | def create_script(self, description, **k): |
|
156 | 156 | """API to :meth:`migrate.versioning.version.Collection.create_new_python_version`""" |
|
157 | 157 | |
|
158 | 158 | k['use_timestamp_numbering'] = self.use_timestamp_numbering |
|
159 | 159 | self.versions.create_new_python_version(description, **k) |
|
160 | 160 | |
|
161 | 161 | def create_script_sql(self, database, description, **k): |
|
162 | 162 | """API to :meth:`migrate.versioning.version.Collection.create_new_sql_version`""" |
|
163 | 163 | k['use_timestamp_numbering'] = self.use_timestamp_numbering |
|
164 | 164 | self.versions.create_new_sql_version(database, description, **k) |
|
165 | 165 | |
|
166 | 166 | @property |
|
167 | 167 | def latest(self): |
|
168 | 168 | """API to :attr:`migrate.versioning.version.Collection.latest`""" |
|
169 | 169 | return self.versions.latest |
|
170 | 170 | |
|
171 | 171 | @property |
|
172 | 172 | def version_table(self): |
|
173 | 173 | """Returns version_table name specified in config""" |
|
174 | 174 | return self.config.get('db_settings', 'version_table') |
|
175 | 175 | |
|
176 | 176 | @property |
|
177 | 177 | def id(self): |
|
178 | 178 | """Returns repository id specified in config""" |
|
179 | 179 | # Adjust the value read from kallithea/lib/dbmigrate/migrate.cfg, normally "kallithea_db_migrations" |
|
180 | 180 | s = self.config.get('db_settings', 'repository_id') |
|
181 | 181 | if s == "kallithea_db_migrations": |
|
182 | 182 | s = kallithea.DB_MIGRATIONS |
|
183 | 183 | return s |
|
184 | 184 | |
|
185 | 185 | @property |
|
186 | 186 | def use_timestamp_numbering(self): |
|
187 | 187 | """Returns use_timestamp_numbering specified in config""" |
|
188 | 188 | if self.config.has_option('db_settings', 'use_timestamp_numbering'): |
|
189 | 189 | return self.config.getboolean('db_settings', 'use_timestamp_numbering') |
|
190 | 190 | return False |
|
191 | 191 | |
|
192 | 192 | def version(self, *p, **k): |
|
193 | 193 | """API to :attr:`migrate.versioning.version.Collection.version`""" |
|
194 | 194 | return self.versions.version(*p, **k) |
|
195 | 195 | |
|
196 | 196 | @classmethod |
|
197 | 197 | def clear(cls): |
|
198 | 198 | # TODO: deletes repo |
|
199 | 199 | super(Repository, cls).clear() |
|
200 | 200 | version.Collection.clear() |
|
201 | 201 | |
|
202 | 202 | def changeset(self, database, start, end=None): |
|
203 | 203 | """Create a changeset to migrate this database from ver. start to end/latest. |
|
204 | 204 | |
|
205 | 205 | :param database: name of database to generate changeset |
|
206 | 206 | :param start: version to start at |
|
207 | 207 | :param end: version to end at (latest if None given) |
|
208 | 208 | :type database: string |
|
209 | 209 | :type start: int |
|
210 | 210 | :type end: int |
|
211 | 211 | :returns: :class:`Changeset instance <migration.versioning.repository.Changeset>` |
|
212 | 212 | """ |
|
213 | 213 | start = version.VerNum(start) |
|
214 | 214 | |
|
215 | 215 | if end is None: |
|
216 | 216 | end = self.latest |
|
217 | 217 | else: |
|
218 | 218 | end = version.VerNum(end) |
|
219 | 219 | |
|
220 | 220 | if start <= end: |
|
221 | 221 | step = 1 |
|
222 | 222 | range_mod = 1 |
|
223 | 223 | op = 'upgrade' |
|
224 | 224 | else: |
|
225 | 225 | step = -1 |
|
226 | 226 | range_mod = 0 |
|
227 | 227 | op = 'downgrade' |
|
228 | 228 | |
|
229 | 229 | versions = range(start + range_mod, end + range_mod, step) |
|
230 | 230 | changes = [self.version(v).script(database, op) for v in versions] |
|
231 | 231 | ret = Changeset(start, step=step, *changes) |
|
232 | 232 | return ret |
|
233 | 233 | |
|
234 | 234 | @classmethod |
|
235 | 235 | def create_manage_file(cls, file_, **opts): |
|
236 | 236 | """Create a project management script (manage.py) |
|
237 | 237 | |
|
238 | 238 | :param file_: Destination file to be written |
|
239 | 239 | :param opts: Options that are passed to :func:`migrate.versioning.shell.main` |
|
240 | 240 | """ |
|
241 | 241 | mng_file = Template(opts.pop('templates_path', None))\ |
|
242 | 242 | .get_manage(theme=opts.pop('templates_theme', None)) |
|
243 | 243 | |
|
244 | 244 | tmpl = open(mng_file).read() |
|
245 | 245 | fd = open(file_, 'w') |
|
246 | 246 | fd.write(TempitaTemplate(tmpl).substitute(opts)) |
|
247 | 247 | fd.close() |
@@ -1,57 +1,57 b'' | |||
|
1 | 1 | #!/usr/bin/env python2 |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | import logging |
|
4 | 4 | |
|
5 | 5 | from kallithea.lib.dbmigrate.migrate import exceptions |
|
6 | 6 | from kallithea.lib.dbmigrate.migrate.versioning.config import operations |
|
7 | 7 | from kallithea.lib.dbmigrate.migrate.versioning import pathed |
|
8 | 8 | |
|
9 | 9 | |
|
10 | 10 | log = logging.getLogger(__name__) |
|
11 | 11 | |
|
12 | 12 | class BaseScript(pathed.Pathed): |
|
13 | 13 | """Base class for other types of scripts. |
|
14 | 14 | All scripts have the following properties: |
|
15 | 15 | |
|
16 | 16 | source (script.source()) |
|
17 | 17 | The source code of the script |
|
18 | 18 | version (script.version()) |
|
19 | 19 | The version number of the script |
|
20 | 20 | operations (script.operations()) |
|
21 | 21 | The operations defined by the script: upgrade(), downgrade() or both. |
|
22 | 22 | Returns a tuple of operations. |
|
23 | 23 | Can also check for an operation with ex. script.operation(Script.ops.up) |
|
24 | 24 | """ # TODO: sphinxfy this and implement it correctly |
|
25 | 25 | |
|
26 | 26 | def __init__(self, path): |
|
27 | log.debug('Loading script %s...' % path) | |
27 | log.debug('Loading script %s...', path) | |
|
28 | 28 | self.verify(path) |
|
29 | 29 | super(BaseScript, self).__init__(path) |
|
30 | log.debug('Script %s loaded successfully' % path) | |
30 | log.debug('Script %s loaded successfully', path) | |
|
31 | 31 | |
|
32 | 32 | @classmethod |
|
33 | 33 | def verify(cls, path): |
|
34 | 34 | """Ensure this is a valid script |
|
35 | 35 | This version simply ensures the script file's existence |
|
36 | 36 | |
|
37 | 37 | :raises: :exc:`InvalidScriptError <migrate.exceptions.InvalidScriptError>` |
|
38 | 38 | """ |
|
39 | 39 | try: |
|
40 | 40 | cls.require_found(path) |
|
41 | 41 | except: |
|
42 | 42 | raise exceptions.InvalidScriptError(path) |
|
43 | 43 | |
|
44 | 44 | def source(self): |
|
45 | 45 | """:returns: source code of the script. |
|
46 | 46 | :rtype: string |
|
47 | 47 | """ |
|
48 | 48 | fd = open(self.path) |
|
49 | 49 | ret = fd.read() |
|
50 | 50 | fd.close() |
|
51 | 51 | return ret |
|
52 | 52 | |
|
53 | 53 | def run(self, engine): |
|
54 | 54 | """Core of each BaseScript subclass. |
|
55 | 55 | This method executes the script. |
|
56 | 56 | """ |
|
57 | 57 | raise NotImplementedError() |
@@ -1,1097 +1,1097 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.model.db_1_2_0 |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Database Models for Kallithea <=1.2.X |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 08, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | 28 | import os |
|
29 | 29 | import logging |
|
30 | 30 | import datetime |
|
31 | 31 | import traceback |
|
32 | 32 | from datetime import date |
|
33 | 33 | |
|
34 | 34 | from sqlalchemy import * |
|
35 | 35 | from sqlalchemy.ext.hybrid import hybrid_property |
|
36 | 36 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates |
|
37 | 37 | from beaker.cache import cache_region, region_invalidate |
|
38 | 38 | |
|
39 | 39 | from kallithea.lib.vcs import get_backend |
|
40 | 40 | from kallithea.lib.vcs.utils.helpers import get_scm |
|
41 | 41 | from kallithea.lib.vcs.exceptions import VCSError |
|
42 | 42 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
43 | 43 | |
|
44 | 44 | from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \ |
|
45 | 45 | generate_api_key, safe_unicode |
|
46 | 46 | from kallithea.lib.exceptions import UserGroupsAssignedException |
|
47 | 47 | from kallithea.lib.compat import json |
|
48 | 48 | |
|
49 | 49 | from kallithea.model.meta import Base, Session |
|
50 | 50 | from kallithea.lib.caching_query import FromCache |
|
51 | 51 | |
|
52 | 52 | from kallithea import DB_PREFIX |
|
53 | 53 | |
|
54 | 54 | log = logging.getLogger(__name__) |
|
55 | 55 | |
|
56 | 56 | #============================================================================== |
|
57 | 57 | # BASE CLASSES |
|
58 | 58 | #============================================================================== |
|
59 | 59 | |
|
60 | 60 | class ModelSerializer(json.JSONEncoder): |
|
61 | 61 | """ |
|
62 | 62 | Simple Serializer for JSON, |
|
63 | 63 | |
|
64 | 64 | usage:: |
|
65 | 65 | |
|
66 | 66 | to make object customized for serialization implement a __json__ |
|
67 | 67 | method that will return a dict for serialization into json |
|
68 | 68 | |
|
69 | 69 | example:: |
|
70 | 70 | |
|
71 | 71 | class Task(object): |
|
72 | 72 | |
|
73 | 73 | def __init__(self, name, value): |
|
74 | 74 | self.name = name |
|
75 | 75 | self.value = value |
|
76 | 76 | |
|
77 | 77 | def __json__(self): |
|
78 | 78 | return dict(name=self.name, |
|
79 | 79 | value=self.value) |
|
80 | 80 | |
|
81 | 81 | """ |
|
82 | 82 | |
|
83 | 83 | def default(self, obj): |
|
84 | 84 | |
|
85 | 85 | if hasattr(obj, '__json__'): |
|
86 | 86 | return obj.__json__() |
|
87 | 87 | else: |
|
88 | 88 | return json.JSONEncoder.default(self, obj) |
|
89 | 89 | |
|
90 | 90 | class BaseModel(object): |
|
91 | 91 | """Base Model for all classes |
|
92 | 92 | |
|
93 | 93 | """ |
|
94 | 94 | |
|
95 | 95 | @classmethod |
|
96 | 96 | def _get_keys(cls): |
|
97 | 97 | """return column names for this model """ |
|
98 | 98 | return class_mapper(cls).c.keys() |
|
99 | 99 | |
|
100 | 100 | def get_dict(self): |
|
101 | 101 | """return dict with keys and values corresponding |
|
102 | 102 | to this model data """ |
|
103 | 103 | |
|
104 | 104 | d = {} |
|
105 | 105 | for k in self._get_keys(): |
|
106 | 106 | d[k] = getattr(self, k) |
|
107 | 107 | return d |
|
108 | 108 | |
|
109 | 109 | def get_appstruct(self): |
|
110 | 110 | """return list with keys and values tuples corresponding |
|
111 | 111 | to this model data """ |
|
112 | 112 | |
|
113 | 113 | l = [] |
|
114 | 114 | for k in self._get_keys(): |
|
115 | 115 | l.append((k, getattr(self, k),)) |
|
116 | 116 | return l |
|
117 | 117 | |
|
118 | 118 | def populate_obj(self, populate_dict): |
|
119 | 119 | """populate model with data from given populate_dict""" |
|
120 | 120 | |
|
121 | 121 | for k in self._get_keys(): |
|
122 | 122 | if k in populate_dict: |
|
123 | 123 | setattr(self, k, populate_dict[k]) |
|
124 | 124 | |
|
125 | 125 | @classmethod |
|
126 | 126 | def query(cls): |
|
127 | 127 | return Session.query(cls) |
|
128 | 128 | |
|
129 | 129 | @classmethod |
|
130 | 130 | def get(cls, id_): |
|
131 | 131 | if id_: |
|
132 | 132 | return cls.query().get(id_) |
|
133 | 133 | |
|
134 | 134 | @classmethod |
|
135 | 135 | def getAll(cls): |
|
136 | 136 | return cls.query().all() |
|
137 | 137 | |
|
138 | 138 | @classmethod |
|
139 | 139 | def delete(cls, id_): |
|
140 | 140 | obj = cls.query().get(id_) |
|
141 | 141 | Session.delete(obj) |
|
142 | 142 | Session.commit() |
|
143 | 143 | |
|
144 | 144 | |
|
145 | 145 | class Setting(Base, BaseModel): |
|
146 | 146 | __tablename__ = DB_PREFIX + 'settings' |
|
147 | 147 | __table_args__ = (UniqueConstraint('app_settings_name'), {'extend_existing':True}) |
|
148 | 148 | app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
149 | 149 | app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
150 | 150 | _app_settings_value = Column("app_settings_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
151 | 151 | |
|
152 | 152 | def __init__(self, k='', v=''): |
|
153 | 153 | self.app_settings_name = k |
|
154 | 154 | self.app_settings_value = v |
|
155 | 155 | |
|
156 | 156 | |
|
157 | 157 | @validates('_app_settings_value') |
|
158 | 158 | def validate_settings_value(self, key, val): |
|
159 | 159 | assert type(val) == unicode |
|
160 | 160 | return val |
|
161 | 161 | |
|
162 | 162 | @hybrid_property |
|
163 | 163 | def app_settings_value(self): |
|
164 | 164 | v = self._app_settings_value |
|
165 | 165 | if v == 'ldap_active': |
|
166 | 166 | v = str2bool(v) |
|
167 | 167 | return v |
|
168 | 168 | |
|
169 | 169 | @app_settings_value.setter |
|
170 | 170 | def app_settings_value(self, val): |
|
171 | 171 | """ |
|
172 | 172 | Setter that will always make sure we use unicode in app_settings_value |
|
173 | 173 | |
|
174 | 174 | :param val: |
|
175 | 175 | """ |
|
176 | 176 | self._app_settings_value = safe_unicode(val) |
|
177 | 177 | |
|
178 | 178 | def __repr__(self): |
|
179 | 179 | return "<%s('%s:%s')>" % (self.__class__.__name__, |
|
180 | 180 | self.app_settings_name, self.app_settings_value) |
|
181 | 181 | |
|
182 | 182 | |
|
183 | 183 | @classmethod |
|
184 | 184 | def get_by_name(cls, ldap_key): |
|
185 | 185 | return cls.query()\ |
|
186 | 186 | .filter(cls.app_settings_name == ldap_key).scalar() |
|
187 | 187 | |
|
188 | 188 | @classmethod |
|
189 | 189 | def get_app_settings(cls, cache=False): |
|
190 | 190 | |
|
191 | 191 | ret = cls.query() |
|
192 | 192 | |
|
193 | 193 | if cache: |
|
194 | 194 | ret = ret.options(FromCache("sql_cache_short", "get_hg_settings")) |
|
195 | 195 | |
|
196 | 196 | if not ret: |
|
197 | 197 | raise Exception('Could not get application settings !') |
|
198 | 198 | settings = {} |
|
199 | 199 | for each in ret: |
|
200 | 200 | settings[each.app_settings_name] = \ |
|
201 | 201 | each.app_settings_value |
|
202 | 202 | |
|
203 | 203 | return settings |
|
204 | 204 | |
|
205 | 205 | @classmethod |
|
206 | 206 | def get_ldap_settings(cls, cache=False): |
|
207 | 207 | ret = cls.query()\ |
|
208 | 208 | .filter(cls.app_settings_name.startswith('ldap_')).all() |
|
209 | 209 | fd = {} |
|
210 | 210 | for row in ret: |
|
211 | 211 | fd.update({row.app_settings_name:row.app_settings_value}) |
|
212 | 212 | |
|
213 | 213 | return fd |
|
214 | 214 | |
|
215 | 215 | |
|
216 | 216 | class Ui(Base, BaseModel): |
|
217 | 217 | __tablename__ = DB_PREFIX + 'ui' |
|
218 | 218 | __table_args__ = (UniqueConstraint('ui_key'), {'extend_existing':True}) |
|
219 | 219 | |
|
220 | 220 | HOOK_UPDATE = 'changegroup.update' |
|
221 | 221 | HOOK_REPO_SIZE = 'changegroup.repo_size' |
|
222 | 222 | HOOK_PUSH = 'pretxnchangegroup.push_logger' |
|
223 | 223 | HOOK_PULL = 'preoutgoing.pull_logger' |
|
224 | 224 | |
|
225 | 225 | ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
226 | 226 | ui_section = Column("ui_section", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
227 | 227 | ui_key = Column("ui_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
228 | 228 | ui_value = Column("ui_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
229 | 229 | ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True) |
|
230 | 230 | |
|
231 | 231 | |
|
232 | 232 | @classmethod |
|
233 | 233 | def get_by_key(cls, key): |
|
234 | 234 | return cls.query().filter(cls.ui_key == key) |
|
235 | 235 | |
|
236 | 236 | |
|
237 | 237 | @classmethod |
|
238 | 238 | def get_builtin_hooks(cls): |
|
239 | 239 | q = cls.query() |
|
240 | 240 | q = q.filter(cls.ui_key.in_([cls.HOOK_UPDATE, |
|
241 | 241 | cls.HOOK_REPO_SIZE, |
|
242 | 242 | cls.HOOK_PUSH, cls.HOOK_PULL])) |
|
243 | 243 | return q.all() |
|
244 | 244 | |
|
245 | 245 | @classmethod |
|
246 | 246 | def get_custom_hooks(cls): |
|
247 | 247 | q = cls.query() |
|
248 | 248 | q = q.filter(~cls.ui_key.in_([cls.HOOK_UPDATE, |
|
249 | 249 | cls.HOOK_REPO_SIZE, |
|
250 | 250 | cls.HOOK_PUSH, cls.HOOK_PULL])) |
|
251 | 251 | q = q.filter(cls.ui_section == 'hooks') |
|
252 | 252 | return q.all() |
|
253 | 253 | |
|
254 | 254 | @classmethod |
|
255 | 255 | def create_or_update_hook(cls, key, val): |
|
256 | 256 | new_ui = cls.get_by_key(key).scalar() or cls() |
|
257 | 257 | new_ui.ui_section = 'hooks' |
|
258 | 258 | new_ui.ui_active = True |
|
259 | 259 | new_ui.ui_key = key |
|
260 | 260 | new_ui.ui_value = val |
|
261 | 261 | |
|
262 | 262 | Session.add(new_ui) |
|
263 | 263 | Session.commit() |
|
264 | 264 | |
|
265 | 265 | |
|
266 | 266 | class User(Base, BaseModel): |
|
267 | 267 | __tablename__ = 'users' |
|
268 | 268 | __table_args__ = (UniqueConstraint('username'), UniqueConstraint('email'), {'extend_existing':True}) |
|
269 | 269 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
270 | 270 | username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
271 | 271 | password = Column("password", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
272 | 272 | active = Column("active", Boolean(), nullable=True, unique=None, default=None) |
|
273 | 273 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
274 | 274 | name = Column("name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
275 | 275 | lastname = Column("lastname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
276 | 276 | email = Column("email", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
277 | 277 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
278 | 278 | ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
279 | 279 | api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None) |
|
280 | 280 | |
|
281 | 281 | user_log = relationship('UserLog', cascade='all') |
|
282 | 282 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all') |
|
283 | 283 | |
|
284 | 284 | repositories = relationship('Repository') |
|
285 | 285 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all') |
|
286 | 286 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all') |
|
287 | 287 | |
|
288 | 288 | group_member = relationship('UserGroupMember', cascade='all') |
|
289 | 289 | |
|
290 | 290 | @property |
|
291 | 291 | def full_contact(self): |
|
292 | 292 | return '%s %s <%s>' % (self.name, self.lastname, self.email) |
|
293 | 293 | |
|
294 | 294 | @property |
|
295 | 295 | def short_contact(self): |
|
296 | 296 | return '%s %s' % (self.name, self.lastname) |
|
297 | 297 | |
|
298 | 298 | @property |
|
299 | 299 | def is_admin(self): |
|
300 | 300 | return self.admin |
|
301 | 301 | |
|
302 | 302 | def __repr__(self): |
|
303 | 303 | try: |
|
304 | 304 | return "<%s('id:%s:%s')>" % (self.__class__.__name__, |
|
305 | 305 | self.user_id, self.username) |
|
306 | 306 | except: |
|
307 | 307 | return self.__class__.__name__ |
|
308 | 308 | |
|
309 | 309 | @classmethod |
|
310 | 310 | def get_by_username(cls, username, case_insensitive=False): |
|
311 | 311 | if case_insensitive: |
|
312 | 312 | return Session.query(cls).filter(cls.username.ilike(username)).scalar() |
|
313 | 313 | else: |
|
314 | 314 | return Session.query(cls).filter(cls.username == username).scalar() |
|
315 | 315 | |
|
316 | 316 | @classmethod |
|
317 | 317 | def get_by_api_key(cls, api_key): |
|
318 | 318 | return cls.query().filter(cls.api_key == api_key).one() |
|
319 | 319 | |
|
320 | 320 | def update_lastlogin(self): |
|
321 | 321 | """Update user lastlogin""" |
|
322 | 322 | |
|
323 | 323 | self.last_login = datetime.datetime.now() |
|
324 | 324 | Session.add(self) |
|
325 | 325 | Session.commit() |
|
326 | log.debug('updated user %s lastlogin' % self.username) | |
326 | log.debug('updated user %s lastlogin', self.username) | |
|
327 | 327 | |
|
328 | 328 | @classmethod |
|
329 | 329 | def create(cls, form_data): |
|
330 | 330 | from kallithea.lib.auth import get_crypt_password |
|
331 | 331 | |
|
332 | 332 | try: |
|
333 | 333 | new_user = cls() |
|
334 | 334 | for k, v in form_data.items(): |
|
335 | 335 | if k == 'password': |
|
336 | 336 | v = get_crypt_password(v) |
|
337 | 337 | setattr(new_user, k, v) |
|
338 | 338 | |
|
339 | 339 | new_user.api_key = generate_api_key() |
|
340 | 340 | Session.add(new_user) |
|
341 | 341 | Session.commit() |
|
342 | 342 | return new_user |
|
343 | 343 | except: |
|
344 | 344 | log.error(traceback.format_exc()) |
|
345 | 345 | Session.rollback() |
|
346 | 346 | raise |
|
347 | 347 | |
|
class UserLog(Base, BaseModel):
    """Audit-log row recording a single user action on a repository."""
    __tablename__ = 'user_logs'
    __table_args__ = {'extend_existing':True}
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    # repository name is denormalized here so log rows survive repo renames/deletes
    repository_name = Column("repository_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    action = Column("action", UnicodeText(length=1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    @property
    def action_as_day(self):
        """Return action_date truncated to a ``date`` (year, month, day)."""
        return date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository')
|
365 | 365 | |
|
366 | 366 | |
|
class UserGroup(Base, BaseModel):
    """A named group of users; permissions can be granted to the group as a whole."""
    __tablename__ = 'users_groups'
    __table_args__ = {'extend_existing':True}

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")

    def __repr__(self):
        return '<userGroup(%s)>' % (self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Return the UserGroup named *group_name*, or None.

        :param cache: use the short-lived SQL cache region
        :param case_insensitive: match with ILIKE instead of equality
        """
        if case_insensitive:
            gr = cls.query()\
                .filter(cls.users_group_name.ilike(group_name))
        else:
            gr = cls.query()\
                .filter(cls.users_group_name == group_name)
        if cache:
            # NOTE(review): cache key says "get_user_" not "get_users_group_";
            # kept as-is to avoid invalidating existing cache entries
            gr = gr.options(FromCache("sql_cache_short",
                                      "get_user_%s" % group_name))
        return gr.scalar()

    @classmethod
    def get(cls, users_group_id, cache=False):
        """Return the UserGroup with the given primary key, or None."""
        users_group = cls.query()
        if cache:
            users_group = users_group.options(FromCache("sql_cache_short",
                                    "get_users_group_%s" % users_group_id))
        return users_group.get(users_group_id)

    @classmethod
    def create(cls, form_data):
        """Create and commit a new UserGroup from a dict of column values.

        Rolls back the session and re-raises on any failure.
        """
        try:
            new_users_group = cls()
            for k, v in form_data.items():
                setattr(new_users_group, k, v)

            Session.add(new_users_group)
            Session.commit()
            return new_users_group
        except Exception:
            # was a bare "except:"; narrowed so SystemExit/KeyboardInterrupt
            # pass through untouched (still rolled back and re-raised)
            log.error(traceback.format_exc())
            Session.rollback()
            raise

    @classmethod
    def update(cls, users_group_id, form_data):
        """Update an existing UserGroup from form data.

        The special key 'users_group_members' replaces the whole membership
        list (string values are treated as a single-element list, duplicates
        are dropped); other keys are set as plain attributes.
        Rolls back and re-raises on failure.
        """
        try:
            users_group = cls.get(users_group_id, cache=False)

            for k, v in form_data.items():
                if k == 'users_group_members':
                    # clear and flush first so delete-orphan cascade fires
                    # before the replacement members are attached
                    users_group.members = []
                    Session.flush()
                    members_list = []
                    if v:
                        v = [v] if isinstance(v, basestring) else v
                        for u_id in set(v):
                            member = UserGroupMember(users_group_id, u_id)
                            members_list.append(member)
                    setattr(users_group, 'members', members_list)
                setattr(users_group, k, v)

            Session.add(users_group)
            Session.commit()
        except Exception:
            log.error(traceback.format_exc())
            Session.rollback()
            raise

    @classmethod
    def delete(cls, users_group_id):
        """Delete a UserGroup unless it still carries repository permissions.

        :raises UserGroupsAssignedException: if the group is still assigned
            to one or more repositories
        """
        try:
            # check if this group is not assigned to repo
            assigned_groups = UserGroupRepoToPerm.query()\
                .filter(UserGroupRepoToPerm.users_group_id ==
                        users_group_id).all()

            if assigned_groups:
                raise UserGroupsAssignedException('RepoGroup assigned to %s' %
                                                  assigned_groups)

            users_group = cls.get(users_group_id, cache=False)
            Session.delete(users_group)
            Session.commit()
        except Exception:
            log.error(traceback.format_exc())
            Session.rollback()
            raise
|
463 | 463 | |
|
class UserGroupMember(Base, BaseModel):
    """Association row linking one User to one UserGroup."""
    __tablename__ = 'users_groups_members'
    __table_args__ = {'extend_existing':True}

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # note: defaults are empty strings rather than None; callers are
        # expected to pass real ids (see UserGroup.update)
        self.users_group_id = gr_id
        self.user_id = u_id

    @staticmethod
    def add_user_to_group(group, user):
        """Create, commit and return a membership row for (group, user)."""
        ugm = UserGroupMember()
        ugm.users_group = group
        ugm.user = user
        Session.add(ugm)
        Session.commit()
        return ugm
|
487 | 487 | |
|
class Repository(Base, BaseModel):
    """A repository known to the application, plus access to its on-disk
    VCS instance (with optional beaker-cached instantiation)."""
    __tablename__ = 'repositories'
    __table_args__ = (UniqueConstraint('repo_name'), {'extend_existing':True},)

    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repo_name = Column("repo_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
    clone_uri = Column("clone_uri", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
    repo_type = Column("repo_type", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default='hg')
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # self-referential FK for forks; FK into the repo-group tree
    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)


    user = relationship('User')
    fork = relationship('Repository', remote_side=repo_id)
    group = relationship('RepoGroup')
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')

    logs = relationship('UserLog', cascade='all')

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__,
                                  self.repo_id, self.repo_name)

    @classmethod
    def url_sep(cls):
        # separator used inside stored repository path names
        return '/'

    @classmethod
    def get_by_repo_name(cls, repo_name):
        """Return the repository named *repo_name* (raises if not exactly one),
        eagerly loading fork, owner and group."""
        q = Session.query(cls).filter(cls.repo_name == repo_name)
        q = q.options(joinedload(Repository.fork))\
            .options(joinedload(Repository.user))\
            .options(joinedload(Repository.group))
        return q.one()

    @classmethod
    def get_repo_forks(cls, repo_id):
        """Query for all direct forks of repository *repo_id*."""
        return cls.query().filter(Repository.fork_id == repo_id)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all repos are stored

        :param cls:
        """
        # the filesystem root is stored in the Ui table under the '/' key
        q = Session.query(Ui).filter(Ui.ui_key ==
                                     cls.url_sep())
        q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @property
    def just_name(self):
        # last path component of the full repo name
        return self.repo_name.split(Repository.url_sep())[-1]

    @property
    def groups_with_parents(self):
        """List of this repo's group and all its ancestors, outermost first."""
        groups = []
        if self.group is None:
            return groups

        cur_gr = self.group
        groups.insert(0, cur_gr)
        while 1:
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            groups.insert(0, gr)

        return groups

    @property
    def groups_and_repo(self):
        # (ancestor group chain, short repo name)
        return self.groups_with_parents, self.just_name

    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        q = Session.query(Ui).filter(Ui.ui_key ==
                                     Repository.url_sep())
        q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @property
    def repo_full_path(self):
        p = [self.repo_path]
        # we need to split the name by / since this is how we store the
        # names in the database, but that eventually needs to be converted
        # into a valid system path
        p += self.repo_name.split(Repository.url_sep())
        return os.path.join(*p)

    def get_new_name(self, repo_name):
        """
        returns new full repository name based on assigned group and new new

        :param group_name:
        """
        path_prefix = self.group.full_path_splitted if self.group else []
        return Repository.url_sep().join(path_prefix + [repo_name])

    @property
    def _ui(self):
        """
        Creates an db based ui object for this repository
        """
        from mercurial import ui
        from mercurial import config
        baseui = ui.ui()

        #clean the baseui object
        baseui._ocfg = config.config()
        baseui._ucfg = config.config()
        baseui._tcfg = config.config()


        ret = Ui.query()\
            .options(FromCache("sql_cache_short", "repository_repo_ui")).all()

        hg_ui = ret
        for ui_ in hg_ui:
            if ui_.ui_active:
                # lazy %-style args: formatted only when DEBUG is on
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)

        return baseui

    @classmethod
    def is_valid(cls, repo_name):
        """
        returns True if given repo name is a valid filesystem repository

        :param cls:
        :param repo_name:
        """
        from kallithea.lib.utils import is_valid_repo

        return is_valid_repo(repo_name, cls.base_path())


    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================

    def get_changeset(self, rev):
        # safe wrapper: presumably tolerant of bad revisions - see
        # get_changeset_safe for the exact semantics
        return get_changeset_safe(self.scm_instance, rev)

    @property
    def tip(self):
        return self.get_changeset('tip')

    @property
    def author(self):
        return self.tip.author

    @property
    def last_change(self):
        return self.scm_instance.last_change

    #==========================================================================
    # SCM CACHE INSTANCE
    #==========================================================================

    @property
    def invalidate(self):
        # returns the pending CacheInvalidation record for this repo, if any
        return CacheInvalidation.invalidate(self.repo_name)

    def set_invalidate(self):
        """
        set a cache for invalidation for this instance
        """
        CacheInvalidation.set_invalidate(self.repo_name)

    @LazyProperty
    def scm_instance(self):
        # uncached (per-object memoized) VCS backend instance
        return self.__get_instance()

    @property
    def scm_instance_cached(self):
        """Beaker-cached VCS instance; refreshed when an invalidation record
        is pending for this repo name."""
        @cache_region('long_term')
        def _c(repo_name):
            return self.__get_instance()
        rn = self.repo_name

        inv = self.invalidate
        if inv is not None:
            # drop the stale cached instance, then mark the record as handled
            region_invalidate(_c, None, rn)
            # update our cache
            CacheInvalidation.set_valid(inv.cache_key)
        return _c(rn)

    def __get_instance(self):
        """Instantiate the VCS backend for this repo from the filesystem;
        returns None when the path is not a recognizable repository."""
        repo_full_path = self.repo_full_path

        try:
            alias = get_scm(repo_full_path)[0]
            log.debug('Creating instance of %s repository', alias)
            backend = get_backend(alias)
        except VCSError:
            log.error(traceback.format_exc())
            log.error('Perhaps this repository is in db and not in '
                      'filesystem run rescan repositories with '
                      '"destroy old data " option from admin panel')
            return

        if alias == 'hg':
            # mercurial needs a bytestring path and the db-backed ui config
            repo = backend(safe_str(repo_full_path), create=False,
                           baseui=self._ui)
        else:
            repo = backend(repo_full_path, create=False)

        return repo
|
718 | 718 | |
|
719 | 719 | |
|
class Group(Base, BaseModel):
    """A repository group (folder); groups nest via group_parent_id."""
    __tablename__ = 'groups'
    __table_args__ = (UniqueConstraint('group_name', 'group_parent_id'),
                      CheckConstraint('group_id != group_parent_id'), {'extend_existing':True},)
    __mapper_args__ = {'order_by':'group_name'}

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    parent_group = relationship('Group', remote_side=group_id)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
                                  self.group_name)

    @classmethod
    def groups_choices(cls):
        """Return (group_id, 'parent » child') select-box choices, sorted by
        top-level name; the first entry is the empty choice."""
        from webhelpers.html import literal as _literal
        repo_groups = [('', '')]
        sep = ' » '
        _name = lambda k: _literal(sep.join(k))

        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
                            for x in cls.query().all()])

        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        # separator used inside stored group path names
        return '/'

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Return the Group named *group_name*, or None.

        :param cache: use the short-lived SQL cache region
        :param case_insensitive: match with ILIKE instead of equality
        """
        if case_insensitive:
            gr = cls.query()\
                .filter(cls.group_name.ilike(group_name))
        else:
            gr = cls.query()\
                .filter(cls.group_name == group_name)
        if cache:
            gr = gr.options(FromCache("sql_cache_short",
                                      "get_group_%s" % group_name))
        return gr.scalar()

    @property
    def parents(self):
        """List of ancestor groups, outermost first; the walk is capped to
        guard against accidental cycles in the parent chain."""
        parents_recursion_limit = 5
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('group nested more than %s',
                          parents_recursion_limit)
                break

            groups.insert(0, gr)
        return groups

    @property
    def children(self):
        # direct sub-groups, as a query (not materialized)
        return Group.query().filter(Group.parent_group == self)

    @property
    def name(self):
        # last path component of the full group name
        return self.group_name.split(Group.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        return self.group_name.split(Group.url_sep())

    @property
    def repositories(self):
        # repositories directly inside this group, as a query
        return Repository.query().filter(Repository.group == self)

    @property
    def repositories_recursive_count(self):
        """Total number of repositories in this group and all nested sub-groups."""
        cnt = self.repositories.count()

        def children_count(group):
            # recursive helper summing repo counts over the subtree
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)


    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return Group.url_sep().join(path_prefix + [group_name])
|
838 | 838 | |
|
839 | 839 | |
|
class Permission(Base, BaseModel):
    """A named permission definition (short key plus human-readable name)."""
    __tablename__ = 'permissions'
    __table_args__ = {'extend_existing':True}
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__,
                                  self.permission_id, self.permission_name)

    @classmethod
    def get_by_key(cls, key):
        """Return the Permission whose permission_name equals *key*, or None."""
        return cls.query().filter(cls.permission_name == key).scalar()
|
854 | 854 | |
|
class UserRepoToPerm(Base, BaseModel):
    """Link table granting a Permission on one repository to one User."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (UniqueConstraint('user_id', 'repository_id'), {'extend_existing':True})
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission')
    repository = relationship('Repository')
|
866 | 866 | |
|
class UserToPerm(Base, BaseModel):
    """Link table granting a global Permission directly to a User."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (UniqueConstraint('user_id', 'permission_id'), {'extend_existing':True})
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission')

    @classmethod
    def has_perm(cls, user_id, perm):
        """Return True if user *user_id* has been granted *perm*.

        :raises Exception: if *perm* is not a Permission instance
        """
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        return cls.query().filter(cls.user_id == user_id)\
            .filter(cls.permission == perm).scalar() is not None

    @classmethod
    def grant_perm(cls, user_id, perm):
        """Grant *perm* to user *user_id*.

        Best-effort: failures (e.g. duplicate grants hitting the unique
        constraint) are logged and rolled back, not raised.
        """
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        new = cls()
        new.user_id = user_id
        new.permission = perm
        try:
            Session.add(new)
            Session.commit()
        except Exception:
            # was a completely silent bare "except:"; keep the best-effort
            # semantics (no re-raise) but record what went wrong
            log.error(traceback.format_exc())
            Session.rollback()


    @classmethod
    def revoke_perm(cls, user_id, perm):
        """Revoke *perm* from user *user_id*; best-effort like grant_perm."""
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        try:
            cls.query().filter(cls.user_id == user_id) \
                .filter(cls.permission == perm).delete()
            Session.commit()
        except Exception:
            log.error(traceback.format_exc())
            Session.rollback()
|
911 | 911 | |
|
class UserGroupRepoToPerm(Base, BaseModel):
    """Link table granting a Permission on one repository to a UserGroup."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (UniqueConstraint('repository_id', 'users_group_id', 'permission_id'), {'extend_existing':True})
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    def __repr__(self):
        return '<userGroup:%s => %s >' % (self.users_group, self.repository)
|
926 | 926 | |
|
class UserGroupToPerm(Base, BaseModel):
    """Link table granting a global Permission to a UserGroup."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = {'extend_existing':True}
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')


    @classmethod
    def has_perm(cls, users_group_id, perm):
        """Return True if the user group has been granted *perm*.

        :raises Exception: if *perm* is not a Permission instance
        """
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        return cls.query().filter(cls.users_group_id ==
                                  users_group_id)\
            .filter(cls.permission == perm)\
            .scalar() is not None

    @classmethod
    def grant_perm(cls, users_group_id, perm):
        """Grant *perm* to the user group.

        Best-effort: failures are logged and rolled back, not raised.
        """
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        new = cls()
        new.users_group_id = users_group_id
        new.permission = perm
        try:
            Session.add(new)
            Session.commit()
        except Exception:
            # was a completely silent bare "except:"; keep the best-effort
            # semantics (no re-raise) but record what went wrong
            log.error(traceback.format_exc())
            Session.rollback()


    @classmethod
    def revoke_perm(cls, users_group_id, perm):
        """Revoke *perm* from the user group; best-effort like grant_perm."""
        if not isinstance(perm, Permission):
            raise Exception('perm needs to be an instance of Permission class')

        try:
            cls.query().filter(cls.users_group_id == users_group_id) \
                .filter(cls.permission == perm).delete()
            Session.commit()
        except Exception:
            log.error(traceback.format_exc())
            Session.rollback()
|
974 | 974 | |
|
975 | 975 | |
|
class UserRepoGroupToPerm(Base, BaseModel):
    """Link table granting a Permission on a repository group to a User."""
    __tablename__ = 'group_to_perm'
    __table_args__ = (UniqueConstraint('group_id', 'permission_id'), {'extend_existing':True})

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    permission = relationship('Permission')
    # NOTE(review): relationship target is named 'RepoGroup' while the mapped
    # class in this chunk is 'Group' - presumably an alias; verify mapping
    group = relationship('RepoGroup')
|
988 | 988 | |
|
class Statistics(Base, BaseModel):
    """Pre-computed commit statistics for one repository (one row per repo)."""
    __tablename__ = 'statistics'
    __table_args__ = (UniqueConstraint('repository_id'), {'extend_existing':True})
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # last revision the statistics were computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data

    repository = relationship('Repository', single_parent=True)
|
1000 | 1000 | |
|
class UserFollowing(Base, BaseModel):
    """A user following either another user or a repository (one of the two
    follows_* foreign keys is set per row)."""
    __tablename__ = 'user_followings'
    __table_args__ = (UniqueConstraint('user_id', 'follows_repository_id'),
                      UniqueConstraint('user_id', 'follows_user_id')
                      , {'extend_existing':True})

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')


    @classmethod
    def get_repo_followers(cls, repo_id):
        """Query for all follow rows pointing at repository *repo_id*."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
|
1022 | 1022 | |
|
class CacheInvalidation(Base, BaseModel):
    """
    Bookkeeping of cache validity: one row per cache key, where
    ``cache_active = False`` marks an entry whose cached data is stale
    and must be rebuilt before being trusted again.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (UniqueConstraint('cache_key'), {'extend_existing':True})
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)


    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        # new entries start out invalid until set_valid() is called
        self.cache_active = False

    def __repr__(self):
        return "<%s('%s:%s')>" % (self.__class__.__name__,
                                  self.cache_id, self.cache_key)

    @classmethod
    def invalidate(cls, key):
        """
        Returns Invalidation object if this given key should be invalidated
        None otherwise. `cache_active = False` means that this cache
        state is not valid and needs to be invalidated

        :param key:
        """
        return cls.query()\
                .filter(CacheInvalidation.cache_key == key)\
                .filter(CacheInvalidation.cache_active == False)\
                .scalar()

    @classmethod
    def set_invalidate(cls, key):
        """
        Mark this Cache key for invalidation

        :param key:
        """

        log.debug('marking %s for invalidation', key)
        inv_obj = Session.query(cls)\
            .filter(cls.cache_key == key).scalar()
        if inv_obj:
            inv_obj.cache_active = False
        else:
            # first time this key is seen: create the row (constructor
            # already sets cache_active = False)
            log.debug('cache key not found in invalidation db -> creating one')
            inv_obj = CacheInvalidation(key)

        try:
            Session.add(inv_obj)
            Session.commit()
        except Exception:
            # best-effort: log and roll back rather than propagate, so a
            # failed invalidation write never breaks the caller
            log.error(traceback.format_exc())
            Session.rollback()

    @classmethod
    def set_valid(cls, key):
        """
        Mark this cache key as active and currently cached

        :param key:
        """
        # NOTE(review): assumes a row for `key` already exists (raises
        # AttributeError on None otherwise) -- presumably callers always
        # invalidate first; confirm before relying on this for new keys
        inv_obj = Session.query(CacheInvalidation)\
            .filter(CacheInvalidation.cache_key == key).scalar()
        inv_obj.cache_active = True
        Session.add(inv_obj)
        Session.commit()
|
1091 | 1091 | |
|
class DbMigrateVersion(Base, BaseModel):
    """
    Schema-version bookkeeping table -- presumably maintained by the
    sqlalchemy-migrate tooling, not by application code; verify before use.
    """
    __tablename__ = 'db_migrate_version'
    __table_args__ = {'extend_existing':True}
    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)
@@ -1,1322 +1,1322 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.model.db_1_3_0 |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Database Models for Kallithea <=1.3.X |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Apr 08, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | |
|
27 | 27 | """ |
|
28 | 28 | |
|
29 | 29 | |
|
30 | 30 | import os |
|
31 | 31 | import logging |
|
32 | 32 | import datetime |
|
33 | 33 | import traceback |
|
34 | 34 | from collections import defaultdict |
|
35 | 35 | |
|
36 | 36 | from sqlalchemy import * |
|
37 | 37 | from sqlalchemy.ext.hybrid import hybrid_property |
|
38 | 38 | from sqlalchemy.orm import relationship, joinedload, class_mapper, validates |
|
39 | 39 | from beaker.cache import cache_region, region_invalidate |
|
40 | 40 | |
|
41 | 41 | from kallithea.lib.vcs import get_backend |
|
42 | 42 | from kallithea.lib.vcs.utils.helpers import get_scm |
|
43 | 43 | from kallithea.lib.vcs.exceptions import VCSError |
|
44 | 44 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
45 | 45 | |
|
46 | 46 | from kallithea.lib.utils2 import str2bool, safe_str, get_changeset_safe, \ |
|
47 | 47 | safe_unicode |
|
48 | 48 | from kallithea.lib.compat import json |
|
49 | 49 | from kallithea.lib.caching_query import FromCache |
|
50 | 50 | |
|
51 | 51 | from kallithea.model.meta import Base, Session |
|
52 | 52 | import hashlib |
|
53 | 53 | |
|
54 | 54 | from kallithea import DB_PREFIX |
|
55 | 55 | |
|
56 | 56 | log = logging.getLogger(__name__) |
|
57 | 57 | |
|
58 | 58 | #============================================================================== |
|
59 | 59 | # BASE CLASSES |
|
60 | 60 | #============================================================================== |
|
61 | 61 | |
|
def _hash_key(k):
    """Return the hex md5 digest of *k*, used as a stable cache-key suffix."""
    return hashlib.md5(safe_str(k)).hexdigest()
|
63 | 63 | |
|
64 | 64 | |
|
class ModelSerializer(json.JSONEncoder):
    """
    JSON encoder that honours a ``__json__()`` hook on objects.

    Any object exposing a ``__json__`` method is serialized by calling
    that method (it must return a dict); everything else falls back to
    the stock :class:`json.JSONEncoder` behaviour.

    example::

        class Task(object):

            def __init__(self, name, value):
                self.name = name
                self.value = value

            def __json__(self):
                return dict(name=self.name,
                            value=self.value)

    """

    def default(self, obj):
        # objects opt in to custom serialization by defining __json__()
        serializer = getattr(obj, '__json__', None)
        if serializer is not None:
            return serializer()
        return json.JSONEncoder.default(self, obj)
|
94 | 94 | |
|
95 | 95 | |
|
class BaseModel(object):
    """
    Mixin shared by every model class: dict/list export of mapped
    columns, population from a dict, and basic query shortcuts.
    """

    @classmethod
    def _get_keys(cls):
        """Return the mapped column names of this model."""
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        Return a dict of column name -> value for this instance,
        extended with any extra fields reported by an optional
        ``__json__()`` method.
        """
        data = dict((key, getattr(self, key)) for key in self._get_keys())

        # merge in additional fields when the model defines __json__()
        extra = getattr(self, '__json__', lambda: {})()
        for key, val in extra.iteritems():
            data[key] = val
        return data

    def get_appstruct(self):
        """Return a list of (column name, value) tuples for this instance."""
        return [(key, getattr(self, key),) for key in self._get_keys()]

    def populate_obj(self, populate_dict):
        """Set every mapped column present in ``populate_dict`` on this instance."""
        for key in self._get_keys():
            if key in populate_dict:
                setattr(self, key, populate_dict[key])

    @classmethod
    def query(cls):
        """Shortcut for a Session query over this model."""
        return Session.query(cls)

    @classmethod
    def get(cls, id_):
        """Return the instance with primary key ``id_``, or None for a falsy id."""
        if not id_:
            return None
        return cls.query().get(id_)

    @classmethod
    def getAll(cls):
        """Return all rows of this model."""
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        """Mark the row with primary key ``id_`` for deletion (no commit here)."""
        obj = cls.query().get(id_)
        Session.delete(obj)

    def __repr__(self):
        # python repr needs to return str; reuse __unicode__ when defined
        unicode_repr = getattr(self, '__unicode__', None)
        if unicode_repr is not None:
            return safe_str(unicode_repr())
        return '<DB:%s>' % (self.__class__.__name__)
|
159 | 159 | |
|
class Setting(Base, BaseModel):
    """Key/value application settings, values always stored as unicode."""
    __tablename__ = DB_PREFIX + 'settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )
    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    def __init__(self, k='', v=''):
        self.app_settings_name = k
        self.app_settings_value = v

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values must already be unicode (the setter below guarantees this)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        value = self._app_settings_value
        # ldap_active is persisted as a string but exposed as a bool
        if self.app_settings_name == 'ldap_active':
            value = str2bool(value)
        return value

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (self.__class__.__name__,
                                   self.app_settings_name,
                                   self.app_settings_value)

    @classmethod
    def get_by_name(cls, ldap_key):
        """Return the setting row named ``ldap_key``, or None."""
        return cls.query()\
            .filter(cls.app_settings_name == ldap_key).scalar()

    @classmethod
    def get_app_settings(cls, cache=False):
        """
        Return all application settings as a name -> value dict.

        :param cache: use the short sql cache region
        """
        query = cls.query()
        if cache:
            query = query.options(FromCache("sql_cache_short",
                                            "get_hg_settings"))
        if not query:
            raise Exception('Could not get application settings !')

        settings = {}
        for row in query:
            settings[row.app_settings_name] = row.app_settings_value
        return settings

    @classmethod
    def get_ldap_settings(cls, cache=False):
        """Return all ``ldap_*`` settings as a dict (``cache`` is unused here)."""
        rows = cls.query()\
            .filter(cls.app_settings_name.startswith('ldap_')).all()
        return dict((row.app_settings_name, row.app_settings_value)
                    for row in rows)
|
233 | 233 | |
|
234 | 234 | |
|
class Ui(Base, BaseModel):
    """Rows of the [ui]-style configuration (hooks, paths, extensions)."""
    __tablename__ = DB_PREFIX + 'ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )

    # hook keys managed by Kallithea itself
    HOOK_UPDATE = 'changegroup.update'
    HOOK_REPO_SIZE = 'changegroup.repo_size'
    HOOK_PUSH = 'pretxnchangegroup.push_logger'
    HOOK_PULL = 'preoutgoing.pull_logger'

    ui_id = Column("ui_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ui_section = Column("ui_section", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    ui_key = Column("ui_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    ui_value = Column("ui_value", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    ui_active = Column("ui_active", Boolean(), nullable=True, unique=None, default=True)

    @classmethod
    def get_by_key(cls, key):
        """Return a query (not a row) filtered to the given ui key."""
        return cls.query().filter(cls.ui_key == key)

    @classmethod
    def get_builtin_hooks(cls):
        """Return all rows for the hooks Kallithea manages itself."""
        builtin = [cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
                   cls.HOOK_PUSH, cls.HOOK_PULL]
        return cls.query().filter(cls.ui_key.in_(builtin)).all()

    @classmethod
    def get_custom_hooks(cls):
        """Return all hook rows that were added by the user."""
        builtin = [cls.HOOK_UPDATE, cls.HOOK_REPO_SIZE,
                   cls.HOOK_PUSH, cls.HOOK_PULL]
        q = cls.query().filter(~cls.ui_key.in_(builtin))
        q = q.filter(cls.ui_section == 'hooks')
        return q.all()

    @classmethod
    def create_or_update_hook(cls, key, val):
        """Upsert the hook identified by ``key`` with command ``val`` (no commit)."""
        hook = cls.get_by_key(key).scalar() or cls()
        hook.ui_section = 'hooks'
        hook.ui_active = True
        hook.ui_key = key
        hook.ui_value = val

        Session.add(hook)
|
284 | 284 | |
|
285 | 285 | |
|
class User(Base, BaseModel):
    """
    A user account, with its permissions, followings, notifications and
    comments wired up as relationships.
    """
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )
    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    password = Column("password", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=None)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    name = Column("name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    # stored lower-cased; always access through the `email` hybrid property
    _email = Column("email", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    ldap_dn = Column("ldap_dn", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    user_log = relationship('UserLog', cascade='all')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')

    repositories = relationship('Repository')
    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # normalize to lower case so email lookups are case-insensitive
        self._email = val.lower() if val else None

    @property
    def full_name(self):
        return '%s %s' % (self.name, self.lastname)

    @property
    def full_name_or_username(self):
        # fall back to username when either name part is missing
        return ('%s %s' % (self.name, self.lastname)
                if (self.name and self.lastname) else self.username)

    @property
    def full_contact(self):
        return '%s %s <%s>' % (self.name, self.lastname, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.name, self.lastname)

    @property
    def is_admin(self):
        return self.admin

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.user_id, self.username)

    @classmethod
    def get_by_username(cls, username, case_insensitive=False, cache=False):
        """Return the user with the given username, or None."""
        if case_insensitive:
            q = cls.query().filter(cls.username.ilike(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            q = q.options(FromCache(
                            "sql_cache_short",
                            "get_user_%s" % _hash_key(username)
                          )
            )
        return q.scalar()

    @classmethod
    def get_by_api_key(cls, api_key, cache=False):
        """Return the user owning the given API key, or None."""
        q = cls.query().filter(cls.api_key == api_key)

        if cache:
            q = q.options(FromCache("sql_cache_short",
                                    "get_api_key_%s" % api_key))
        return q.scalar()

    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """Return the user with the given email address, or None."""
        if case_insensitive:
            q = cls.query().filter(cls.email.ilike(email))
        else:
            q = cls.query().filter(cls.email == email)

        if cache:
            # BUGFIX: previously used the "get_api_key_%s" namespace
            # (copy-paste from get_by_api_key), which could collide with
            # api-key cache entries; use a dedicated namespace instead
            q = q.options(FromCache("sql_cache_short",
                                    "get_email_key_%s" % email))
        return q.scalar()

    def update_lastlogin(self):
        """Update user lastlogin"""
        self.last_login = datetime.datetime.now()
        Session.add(self)
        log.debug('updated user %s lastlogin', self.username)

    def __json__(self):
        return dict(
            user_id=self.user_id,
            first_name=self.name,
            last_name=self.lastname,
            email=self.email,
            full_name=self.full_name,
            full_name_or_username=self.full_name_or_username,
            short_contact=self.short_contact,
            full_contact=self.full_contact
        )
|
407 | 407 | |
|
408 | 408 | |
|
class UserLog(Base, BaseModel):
    """Audit-log entry: one action performed by a user, optionally on a repo."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'},
    )
    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True)
    # repo name is denormalized here so log entries survive repo deletion
    repository_name = Column("repository_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    action = Column("action", UnicodeText(length=1200000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    @property
    def action_as_day(self):
        """The calendar day (datetime.date) on which the action happened."""
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
|
429 | 429 | |
|
430 | 430 | |
|
class UserGroup(Base, BaseModel):
    """A named group of users, used as a permission target."""
    __tablename__ = 'users_groups'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'},
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')

    def __unicode__(self):
        return u'<userGroup(%s)>' % (self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Return the group named ``group_name``, or None."""
        if case_insensitive:
            q = cls.query().filter(cls.users_group_name.ilike(group_name))
        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            cache_key = "get_user_%s" % _hash_key(group_name)
            q = q.options(FromCache("sql_cache_short", cache_key))
        return q.scalar()

    @classmethod
    def get(cls, users_group_id, cache=False):
        """Return the group with the given id, optionally via the sql cache."""
        q = cls.query()
        if cache:
            q = q.options(FromCache("sql_cache_short",
                                    "get_users_group_%s" % users_group_id))
        return q.get(users_group_id)
|
471 | 471 | |
|
472 | 472 | |
|
class UserGroupMember(Base, BaseModel):
    """Association row linking a user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'},
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        self.users_group_id = gr_id
        self.user_id = u_id
|
490 | 490 | |
|
491 | 491 | |
|
class Repository(Base, BaseModel):
    """A hosted repository (hg or git), its owner, group and permissions."""
    __tablename__ = 'repositories'
    __table_args__ = (
        UniqueConstraint('repo_name'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'},
    )

    repo_id = Column("repo_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # full name including group path, '/'-separated (see url_sep())
    repo_name = Column("repo_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
    clone_uri = Column("clone_uri", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=False, default=None)
    repo_type = Column("repo_type", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=False, default='hg')
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    private = Column("private", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column("statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column("downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # set when this repository is a fork of another one
    fork_id = Column("fork_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=False, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=False, default=None)

    user = relationship('User')
    fork = relationship('Repository', remote_side=repo_id)
    group = relationship('RepoGroup')
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all')

    logs = relationship('UserLog')
|
524 | 524 | |
|
525 | 525 | def __unicode__(self): |
|
526 | 526 | return u"<%s('%s:%s')>" % (self.__class__.__name__,self.repo_id, |
|
527 | 527 | self.repo_name) |
|
528 | 528 | |
|
    @classmethod
    def url_sep(cls):
        """Separator used in stored repo names and repository URL paths."""
        return '/'
|
532 | 532 | |
|
533 | 533 | @classmethod |
|
534 | 534 | def get_by_repo_name(cls, repo_name): |
|
535 | 535 | q = Session.query(cls).filter(cls.repo_name == repo_name) |
|
536 | 536 | q = q.options(joinedload(Repository.fork))\ |
|
537 | 537 | .options(joinedload(Repository.user))\ |
|
538 | 538 | .options(joinedload(Repository.group)) |
|
539 | 539 | return q.scalar() |
|
540 | 540 | |
|
    @classmethod
    def get_repo_forks(cls, repo_id):
        """Return a query of all repositories forked from ``repo_id``."""
        return cls.query().filter(Repository.fork_id == repo_id)
|
544 | 544 | |
|
545 | 545 | @classmethod |
|
546 | 546 | def base_path(cls): |
|
547 | 547 | """ |
|
548 | 548 | Returns base path when all repos are stored |
|
549 | 549 | |
|
550 | 550 | :param cls: |
|
551 | 551 | """ |
|
552 | 552 | q = Session.query(Ui)\ |
|
553 | 553 | .filter(Ui.ui_key == cls.url_sep()) |
|
554 | 554 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
555 | 555 | return q.one().ui_value |
|
556 | 556 | |
|
557 | 557 | @property |
|
558 | 558 | def just_name(self): |
|
559 | 559 | return self.repo_name.split(Repository.url_sep())[-1] |
|
560 | 560 | |
|
561 | 561 | @property |
|
562 | 562 | def groups_with_parents(self): |
|
563 | 563 | groups = [] |
|
564 | 564 | if self.group is None: |
|
565 | 565 | return groups |
|
566 | 566 | |
|
567 | 567 | cur_gr = self.group |
|
568 | 568 | groups.insert(0, cur_gr) |
|
569 | 569 | while 1: |
|
570 | 570 | gr = getattr(cur_gr, 'parent_group', None) |
|
571 | 571 | cur_gr = cur_gr.parent_group |
|
572 | 572 | if gr is None: |
|
573 | 573 | break |
|
574 | 574 | groups.insert(0, gr) |
|
575 | 575 | |
|
576 | 576 | return groups |
|
577 | 577 | |
|
    @property
    def groups_and_repo(self):
        """Tuple of (list of parent groups, repository short name)."""
        return self.groups_with_parents, self.just_name
|
581 | 581 | |
|
582 | 582 | @LazyProperty |
|
583 | 583 | def repo_path(self): |
|
584 | 584 | """ |
|
585 | 585 | Returns base full path for that repository means where it actually |
|
586 | 586 | exists on a filesystem |
|
587 | 587 | """ |
|
588 | 588 | q = Session.query(Ui).filter(Ui.ui_key == |
|
589 | 589 | Repository.url_sep()) |
|
590 | 590 | q = q.options(FromCache("sql_cache_short", "repository_repo_path")) |
|
591 | 591 | return q.one().ui_value |
|
592 | 592 | |
|
593 | 593 | @property |
|
594 | 594 | def repo_full_path(self): |
|
595 | 595 | p = [self.repo_path] |
|
596 | 596 | # we need to split the name by / since this is how we store the |
|
597 | 597 | # names in the database, but that eventually needs to be converted |
|
598 | 598 | # into a valid system path |
|
599 | 599 | p += self.repo_name.split(Repository.url_sep()) |
|
600 | 600 | return os.path.join(*p) |
|
601 | 601 | |
|
602 | 602 | def get_new_name(self, repo_name): |
|
603 | 603 | """ |
|
604 | 604 | returns new full repository name based on assigned group and new new |
|
605 | 605 | |
|
606 | 606 | :param group_name: |
|
607 | 607 | """ |
|
608 | 608 | path_prefix = self.group.full_path_splitted if self.group else [] |
|
609 | 609 | return Repository.url_sep().join(path_prefix + [repo_name]) |
|
610 | 610 | |
|
    @property
    def _ui(self):
        """
        Creates an db based ui object for this repository.

        Builds a fresh mercurial ``ui`` whose settings come from the Ui
        settings table instead of hgrc files on disk.
        """
        from mercurial import ui
        from mercurial import config
        baseui = ui.ui()

        #clean the baseui object
        # NOTE(review): these are mercurial-private config slots
        # (overlay/user/trusted); resetting them discards any config the
        # ui() constructor picked up from the environment -- confirm this
        # still holds for the pinned mercurial version
        baseui._ocfg = config.config()
        baseui._ucfg = config.config()
        baseui._tcfg = config.config()

        # all rows from the Ui table, short-term SQL cached
        ret = Ui.query()\
            .options(FromCache("sql_cache_short", "repository_repo_ui")).all()

        hg_ui = ret
        for ui_ in hg_ui:
            # only apply settings flagged active in the database
            if ui_.ui_active:
                log.debug('settings ui from db: [%s] %s=%s', ui_.ui_section,
                          ui_.ui_key, ui_.ui_value)
                baseui.setconfig(ui_.ui_section, ui_.ui_key, ui_.ui_value)

        return baseui
|
636 | 636 | |
|
637 | 637 | @classmethod |
|
638 | 638 | def is_valid(cls, repo_name): |
|
639 | 639 | """ |
|
640 | 640 | returns True if given repo name is a valid filesystem repository |
|
641 | 641 | |
|
642 | 642 | :param cls: |
|
643 | 643 | :param repo_name: |
|
644 | 644 | """ |
|
645 | 645 | from kallithea.lib.utils import is_valid_repo |
|
646 | 646 | |
|
647 | 647 | return is_valid_repo(repo_name, cls.base_path()) |
|
648 | 648 | |
|
649 | 649 | #========================================================================== |
|
650 | 650 | # SCM PROPERTIES |
|
651 | 651 | #========================================================================== |
|
652 | 652 | |
|
653 | 653 | def get_changeset(self, rev): |
|
654 | 654 | return get_changeset_safe(self.scm_instance, rev) |
|
655 | 655 | |
|
656 | 656 | @property |
|
657 | 657 | def tip(self): |
|
658 | 658 | return self.get_changeset('tip') |
|
659 | 659 | |
|
660 | 660 | @property |
|
661 | 661 | def author(self): |
|
662 | 662 | return self.tip.author |
|
663 | 663 | |
|
664 | 664 | @property |
|
665 | 665 | def last_change(self): |
|
666 | 666 | return self.scm_instance.last_change |
|
667 | 667 | |
|
668 | 668 | def comments(self, revisions=None): |
|
669 | 669 | """ |
|
670 | 670 | Returns comments for this repository grouped by revisions |
|
671 | 671 | |
|
672 | 672 | :param revisions: filter query by revisions only |
|
673 | 673 | """ |
|
674 | 674 | cmts = ChangesetComment.query()\ |
|
675 | 675 | .filter(ChangesetComment.repo == self) |
|
676 | 676 | if revisions: |
|
677 | 677 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) |
|
678 | 678 | grouped = defaultdict(list) |
|
679 | 679 | for cmt in cmts.all(): |
|
680 | 680 | grouped[cmt.revision].append(cmt) |
|
681 | 681 | return grouped |
|
682 | 682 | |
|
683 | 683 | #========================================================================== |
|
684 | 684 | # SCM CACHE INSTANCE |
|
685 | 685 | #========================================================================== |
|
686 | 686 | |
|
687 | 687 | @property |
|
688 | 688 | def invalidate(self): |
|
689 | 689 | return CacheInvalidation.invalidate(self.repo_name) |
|
690 | 690 | |
|
691 | 691 | def set_invalidate(self): |
|
692 | 692 | """ |
|
693 | 693 | set a cache for invalidation for this instance |
|
694 | 694 | """ |
|
695 | 695 | CacheInvalidation.set_invalidate(self.repo_name) |
|
696 | 696 | |
|
    @LazyProperty
    def scm_instance(self):
        # vcs backend object for this repository; built once per instance
        # and memoized by LazyProperty (no cross-request caching -- see
        # scm_instance_cached for the beaker-cached variant)
        return self.__get_instance()
|
700 | 700 | |
|
    @property
    def scm_instance_cached(self):
        """Beaker-cached vcs backend instance, rebuilt on invalidation."""
        # cache the constructed instance under the repo name in the
        # 'long_term' region
        @cache_region('long_term')
        def _c(repo_name):
            return self.__get_instance()
        rn = self.repo_name
        log.debug('Getting cached instance of repo')
        inv = self.invalidate
        if inv is not None:
            # a pending invalidation record exists: drop the stale region
            # entry so _c(rn) below rebuilds it, then flag the key valid
            region_invalidate(_c, None, rn)
            # update our cache
            CacheInvalidation.set_valid(inv.cache_key)
        return _c(rn)
|
714 | 714 | |
|
715 | 715 | def __get_instance(self): |
|
716 | 716 | repo_full_path = self.repo_full_path |
|
717 | 717 | try: |
|
718 | 718 | alias = get_scm(repo_full_path)[0] |
|
719 |
log.debug('Creating instance of %s repository' |
|
|
719 | log.debug('Creating instance of %s repository', alias) | |
|
720 | 720 | backend = get_backend(alias) |
|
721 | 721 | except VCSError: |
|
722 | 722 | log.error(traceback.format_exc()) |
|
723 | 723 | log.error('Perhaps this repository is in db and not in ' |
|
724 | 724 | 'filesystem run rescan repositories with ' |
|
725 | 725 | '"destroy old data " option from admin panel') |
|
726 | 726 | return |
|
727 | 727 | |
|
728 | 728 | if alias == 'hg': |
|
729 | 729 | |
|
730 | 730 | repo = backend(safe_str(repo_full_path), create=False, |
|
731 | 731 | baseui=self._ui) |
|
732 | 732 | else: |
|
733 | 733 | repo = backend(repo_full_path, create=False) |
|
734 | 734 | |
|
735 | 735 | return repo |
|
736 | 736 | |
|
737 | 737 | |
|
class RepoGroup(Base, BaseModel):
    """A (possibly nested) group of repositories; rows form a tree via
    ``group_parent_id``."""
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        # a group can never be its own parent
        CheckConstraint('group_id != group_parent_id'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'},
    )
    __mapper_args__ = {'order_by': 'group_name'}

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # full slash-separated path, e.g. 'parent/child' -- see full_path_splitted
    group_name = Column("group_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(length=10000, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')

    # self-referential relationship to the parent group (None for roots)
    parent_group = relationship('RepoGroup', remote_side=group_id)

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.group_id,
                                   self.group_name)

    @classmethod
    def groups_choices(cls):
        """Return (group_id, display label) pairs for select widgets,
        sorted by top-level group name; first entry is an empty choice."""
        from webhelpers.html import literal as _literal
        repo_groups = [('', '')]
        sep = ' » '
        _name = lambda k: _literal(sep.join(k))

        repo_groups.extend([(x.group_id, _name(x.full_path_splitted))
                            for x in cls.query().all()])

        repo_groups = sorted(repo_groups, key=lambda t: t[1].split(sep)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        # separator used in group paths and URLs
        return '/'

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """Look up a group by full path name; optionally case-insensitive
        and optionally via the short-term SQL cache."""
        if case_insensitive:
            gr = cls.query()\
                .filter(cls.group_name.ilike(group_name))
        else:
            gr = cls.query()\
                .filter(cls.group_name == group_name)
        if cache:
            gr = gr.options(FromCache(
                "sql_cache_short",
                "get_group_%s" % _hash_key(group_name)
            )
            )
        return gr.scalar()

    @property
    def parents(self):
        """Ancestor groups ordered root-first, walking at most
        ``parents_recursion_limit`` levels up (guards against cycles)."""
        parents_recursion_limit = 5
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('group nested more than %s',
                          parents_recursion_limit)
                break

            groups.insert(0, gr)
        return groups

    @property
    def children(self):
        # query of direct child groups (not recursive)
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        # last path segment, without parent path
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        # group path as a list of segments
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        # query of repositories directly inside this group
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        """Number of repositories in this group and all descendants."""
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name: new short name for this group
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])
|
867 | 867 | |
|
868 | 868 | |
|
class Permission(Base, BaseModel):
    """A named permission (e.g. repo read/write/admin) referenced by the
    various *ToPerm association tables."""
    __tablename__ = 'permissions'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'},
    )
    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # machine name used for lookups (see get_by_key)
    permission_name = Column("permission_name", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    # human-readable label
    permission_longname = Column("permission_longname", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Return the permission whose name equals ``key``, or None."""
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_perms(cls, default_user_id):
        """(UserRepoToPerm, Repository, Permission) triples for the
        default user -- i.e. per-repository default permissions."""
        q = Session.query(UserRepoToPerm, Repository, cls)\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .join((cls, UserRepoToPerm.permission_id == cls.permission_id))\
            .filter(UserRepoToPerm.user_id == default_user_id)

        return q.all()

    @classmethod
    def get_default_group_perms(cls, default_user_id):
        """(UserRepoGroupToPerm, RepoGroup, Permission) triples for the
        default user -- i.e. per-repo-group default permissions."""
        q = Session.query(UserRepoGroupToPerm, RepoGroup, cls)\
            .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
            .join((cls, UserRepoGroupToPerm.permission_id == cls.permission_id))\
            .filter(UserRepoGroupToPerm.user_id == default_user_id)

        return q.all()
|
905 | 905 | |
|
906 | 906 | |
|
class UserRepoToPerm(Base, BaseModel):
    """Association: a user's permission on a single repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )
    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        """Add (but do not commit) a new user/repo permission row."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session.add(n)
        return n

    def __unicode__(self):
        return u'<user:%s => %s >' % (self.user, self.repository)
|
934 | 934 | |
|
935 | 935 | |
|
class UserToPerm(Base, BaseModel):
    """Association: a global (non-repository) permission of a user."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )
    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly loaded since permission checks always need it
    permission = relationship('Permission', lazy='joined')
|
949 | 949 | |
|
950 | 950 | |
|
class UserGroupRepoToPerm(Base, BaseModel):
    """Association: a user group's permission on a single repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Add (but do not commit) a new user-group/repo permission row."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session.add(n)
        return n

    def __unicode__(self):
        return u'<userGroup:%s => %s >' % (self.users_group, self.repository)
|
978 | 978 | |
|
979 | 979 | |
|
class UserGroupToPerm(Base, BaseModel):
    """Association: a global (non-repository) permission of a user group."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )
    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
|
993 | 993 | |
|
994 | 994 | |
|
class UserRepoGroupToPerm(Base, BaseModel):
    """Association: a user's permission on a repository group."""
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')
|
1011 | 1011 | |
|
1012 | 1012 | |
|
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """Association: a user group's permission on a repository group."""
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike the sibling *ToPerm tables this constraint
        # omits permission_id, so a user group can hold at most one
        # permission per repo group -- confirm this is intentional before
        # any schema change (altering it requires a migration)
        UniqueConstraint('users_group_id', 'group_id'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')
|
1029 | 1029 | |
|
1030 | 1030 | |
|
class Statistics(Base, BaseModel):
    """Pre-computed commit/language statistics for one repository
    (one row per repo, payloads stored as serialized JSON blobs)."""
    __tablename__ = 'statistics'
    __table_args__ = (
        UniqueConstraint('repository_id'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )
    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # revision the statistics were last computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data

    repository = relationship('Repository', single_parent=True)
|
1046 | 1046 | |
|
1047 | 1047 | |
|
class UserFollowing(Base, BaseModel):
    """A user following either a repository or another user
    (exactly one of the two follows_* columns is expected to be set)."""
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # explicit primaryjoins needed: two FKs point at users.user_id
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of followings that target repository ``repo_id``."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
|
1071 | 1071 | |
|
1072 | 1072 | |
|
class CacheInvalidation(Base, BaseModel):
    """
    Per-key validity flags for the long-term caches.

    ``cache_active == False`` means the data cached under ``cache_key`` is
    stale and must be regenerated; consumers rebuild it and then call
    :meth:`set_valid`.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'},
    )
    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # instance-prefixed key (see _get_key)
    cache_key = Column("cache_key", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    # original (unprefixed) key, used to find all rows of one resource
    cache_args = Column("cache_args", String(length=255, convert_unicode=False, assert_unicode=None), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (self.__class__.__name__,
                                   self.cache_id, self.cache_key)

    @classmethod
    def clear_cache(cls):
        """Delete all invalidation records."""
        cls.query().delete()

    @classmethod
    def _get_key(cls, key):
        """
        Wrapper for generating a key, together with a prefix

        :param key:
        :returns: tuple of (instance-prefixed key, prefix, original key
            with any literal ``_README`` suffix removed)
        """
        import kallithea
        prefix = ''
        iid = kallithea.CONFIG.get('instance_id')
        if iid:
            prefix = iid
        # BUGFIX: the previous ``key.rstrip('_README')`` treated its
        # argument as a *character set*, stripping any trailing '_', 'R',
        # 'E', 'A', 'D' or 'M' characters and thereby mangling repo names
        # that merely end in one of those letters. Strip the literal
        # suffix instead.
        if key.endswith('_README'):
            org_key = key[:-len('_README')]
        else:
            org_key = key
        return "%s%s" % (prefix, key), prefix, org_key

    @classmethod
    def get_by_key(cls, key):
        """Return the record whose cache_key equals ``key``, or None."""
        return cls.query().filter(cls.cache_key == key).scalar()

    @classmethod
    def _get_or_create_key(cls, key, prefix, org_key):
        # fetch the invalidation row for ``key``, lazily creating it with
        # the original (unprefixed) key stored in cache_args; creation
        # failures are logged and rolled back, returning None
        inv_obj = Session.query(cls).filter(cls.cache_key == key).scalar()
        if not inv_obj:
            try:
                inv_obj = CacheInvalidation(key, org_key)
                Session.add(inv_obj)
                Session.commit()
            except Exception:
                log.error(traceback.format_exc())
                Session.rollback()
        return inv_obj

    @classmethod
    def invalidate(cls, key):
        """
        Returns Invalidation object if this given key should be invalidated
        None otherwise. `cache_active = False` means that this cache
        state is not valid and needs to be invalidated

        :param key:
        """
        key, _prefix, _org_key = cls._get_key(key)
        inv = cls._get_or_create_key(key, _prefix, _org_key)

        if inv and inv.cache_active is False:
            return inv

    @classmethod
    def set_invalidate(cls, key):
        """
        Mark this Cache key for invalidation

        :param key:
        """
        key, _prefix, _org_key = cls._get_key(key)
        # one resource may have several cache keys (e.g. different
        # instance prefixes); flag every row sharing the original key
        inv_objs = Session.query(cls).filter(cls.cache_args == _org_key).all()
        log.debug('marking %s key[s] %s for invalidation', len(inv_objs),
                  _org_key)
        try:
            for inv_obj in inv_objs:
                # rows returned by .all() are never None, so no guard needed
                inv_obj.cache_active = False
                Session.add(inv_obj)
            Session.commit()
        except Exception:
            log.error(traceback.format_exc())
            Session.rollback()

    @classmethod
    def set_valid(cls, key):
        """
        Mark this cache key as active and currently cached

        :param key: already-prefixed cache key of an existing record
        """
        # NOTE(review): raises AttributeError when no record exists for
        # ``key``; callers (scm_instance_cached) only pass keys taken from
        # existing records
        inv_obj = cls.get_by_key(key)
        inv_obj.cache_active = True
        Session.add(inv_obj)
        Session.commit()
|
1178 | 1178 | |
|
1179 | 1179 | |
|
class ChangesetComment(Base, BaseModel):
    """A comment on a changeset, optionally anchored to a file and line
    (inline comment) via ``f_path``/``line_no``."""
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'},
    )
    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    # raw changeset hash the comment belongs to
    revision = Column('revision', String(40), nullable=False)
    line_no = Column('line_no', Unicode(10), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', Unicode(25000), nullable=False)
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')

    @classmethod
    def get_users(cls, revision):
        """
        Returns user associated with this changesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        return Session.query(User)\
            .filter(cls.revision == revision)\
            .join(ChangesetComment.author).all()
|
1210 | 1210 | |
|
1211 | 1211 | |
|
class Notification(Base, BaseModel):
    """A notification message fanned out to users through the
    UserNotification association table."""
    __tablename__ = 'notifications'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'},
    )

    # known values for the type_ column
    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', Unicode(50000), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(256))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        # users this notification was delivered to
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """Build a notification and its per-recipient association rows;
        adds to the session but does not commit.

        :param created_by: User instance that triggered the notification
        :param recipients: iterable of User instances to deliver to
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        for u in recipients:
            assoc = UserNotification()
            assoc.notification = notification
            # appending to the user's collection cascades persistence
            u.notifications.append(assoc)
        Session.add(notification)
        return notification

    @property
    def description(self):
        # imported here to avoid a circular import at module load time
        from kallithea.model.notification import NotificationModel
        return NotificationModel().make_description(self)
|
1263 | 1263 | |
|
1264 | 1264 | |
|
class UserNotification(Base, BaseModel):
    """Association table between users and notifications: tracks per-user
    read state and the time the notification was sent to the user.
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        # each (user, notification) pair may appear at most once
        UniqueConstraint('user_id', 'notification_id'),
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'}
    )
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    # whether this user has read the notification yet
    read = Column('read', Boolean, default=False)
    # when the notification was delivered to this user, if at all
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        """Flag this notification as read for this user and stage the
        change in the session (the caller is responsible for committing).
        """
        self.read = True
        Session.add(self)
|
1284 | 1284 | |
|
1285 | 1285 | |
|
class DbMigrateVersion(Base, BaseModel):
    """Schema-version bookkeeping table: records which migration version a
    given repository path is at (presumably maintained by the
    sqlalchemy-migrate tooling -- confirm against the migration scripts).
    """
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        {'extend_existing': True, 'mysql_engine':'InnoDB',
         'mysql_charset': 'utf8'},
    )
    # identifier of the migrate repository (primary key)
    repository_id = Column('repository_id', String(250), primary_key=True)
    # filesystem path of the migrate repository
    repository_path = Column('repository_path', Text)
    # current schema version number
    version = Column('version', Integer)
|
1295 | 1295 | |
|
## This table comes from the 1_4_0 migration, but it lives here to overcome
## a problem with attaching a FK to it from the 1_3_0 schema!
|
1298 | 1298 | |
|
1299 | 1299 | |
|
1300 | 1300 | class PullRequest(Base, BaseModel): |
|
1301 | 1301 | __tablename__ = 'pull_requests' |
|
1302 | 1302 | __table_args__ = ( |
|
1303 | 1303 | {'extend_existing': True, 'mysql_engine': 'InnoDB', |
|
1304 | 1304 | 'mysql_charset': 'utf8'}, |
|
1305 | 1305 | ) |
|
1306 | 1306 | |
|
1307 | 1307 | STATUS_NEW = u'new' |
|
1308 | 1308 | STATUS_OPEN = u'open' |
|
1309 | 1309 | STATUS_CLOSED = u'closed' |
|
1310 | 1310 | |
|
1311 | 1311 | pull_request_id = Column('pull_request_id', Integer(), nullable=False, primary_key=True) |
|
1312 | 1312 | title = Column('title', Unicode(256), nullable=True) |
|
1313 | 1313 | description = Column('description', UnicodeText(10240), nullable=True) |
|
1314 | 1314 | status = Column('status', Unicode(256), nullable=False, default=STATUS_NEW) |
|
1315 | 1315 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1316 | 1316 | updated_on = Column('updated_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1317 | 1317 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None) |
|
1318 | 1318 | _revisions = Column('revisions', UnicodeText(20500)) # 500 revisions max |
|
1319 | 1319 | org_repo_id = Column('org_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
1320 | 1320 | org_ref = Column('org_ref', Unicode(256), nullable=False) |
|
1321 | 1321 | other_repo_id = Column('other_repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False) |
|
1322 | 1322 | other_ref = Column('other_ref', Unicode(256), nullable=False) |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now