Show More
@@ -1,229 +1,229 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # |
|
2 | # | |
3 | # Kallithea documentation build configuration file, created by |
|
3 | # Kallithea documentation build configuration file, created by | |
4 | # sphinx-quickstart on Sun Oct 10 16:46:37 2010. |
|
4 | # sphinx-quickstart on Sun Oct 10 16:46:37 2010. | |
5 | # |
|
5 | # | |
6 | # This file is execfile()d with the current directory set to its containing dir. |
|
6 | # This file is execfile()d with the current directory set to its containing dir. | |
7 | # |
|
7 | # | |
8 | # Note that not all possible configuration values are present in this |
|
8 | # Note that not all possible configuration values are present in this | |
9 | # autogenerated file. |
|
9 | # autogenerated file. | |
10 | # |
|
10 | # | |
11 | # All configuration values have a default; values that are commented out |
|
11 | # All configuration values have a default; values that are commented out | |
12 | # serve to show the default. |
|
12 | # serve to show the default. | |
13 |
|
13 | |||
14 | import os |
|
14 | import os | |
15 | import sys |
|
15 | import sys | |
16 |
|
16 | |||
17 | from kallithea import __version__ |
|
17 | import kallithea | |
18 |
|
18 | |||
19 |
|
19 | |||
20 | # If extensions (or modules to document with autodoc) are in another directory, |
|
20 | # If extensions (or modules to document with autodoc) are in another directory, | |
21 | # add these directories to sys.path here. If the directory is relative to the |
|
21 | # add these directories to sys.path here. If the directory is relative to the | |
22 | # documentation root, use os.path.abspath to make it absolute, like shown here. |
|
22 | # documentation root, use os.path.abspath to make it absolute, like shown here. | |
23 | sys.path.insert(0, os.path.abspath('..')) |
|
23 | sys.path.insert(0, os.path.abspath('..')) | |
24 |
|
24 | |||
25 | # -- General configuration ----------------------------------------------------- |
|
25 | # -- General configuration ----------------------------------------------------- | |
26 |
|
26 | |||
27 | # If your documentation needs a minimal Sphinx version, state it here. |
|
27 | # If your documentation needs a minimal Sphinx version, state it here. | |
28 | #needs_sphinx = '1.0' |
|
28 | #needs_sphinx = '1.0' | |
29 |
|
29 | |||
30 | # Add any Sphinx extension module names here, as strings. They can be extensions |
|
30 | # Add any Sphinx extension module names here, as strings. They can be extensions | |
31 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. |
|
31 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | |
32 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', |
|
32 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', | |
33 | 'sphinx.ext.intersphinx', 'sphinx.ext.todo', |
|
33 | 'sphinx.ext.intersphinx', 'sphinx.ext.todo', | |
34 | 'sphinx.ext.viewcode'] |
|
34 | 'sphinx.ext.viewcode'] | |
35 |
|
35 | |||
36 | # Add any paths that contain templates here, relative to this directory. |
|
36 | # Add any paths that contain templates here, relative to this directory. | |
37 | templates_path = ['_templates'] |
|
37 | templates_path = ['_templates'] | |
38 |
|
38 | |||
39 | # The suffix of source filenames. |
|
39 | # The suffix of source filenames. | |
40 | source_suffix = '.rst' |
|
40 | source_suffix = '.rst' | |
41 |
|
41 | |||
42 | # The encoding of source files. |
|
42 | # The encoding of source files. | |
43 | #source_encoding = 'utf-8-sig' |
|
43 | #source_encoding = 'utf-8-sig' | |
44 |
|
44 | |||
45 | # The master toctree document. |
|
45 | # The master toctree document. | |
46 | master_doc = 'index' |
|
46 | master_doc = 'index' | |
47 |
|
47 | |||
48 | # General information about the project. |
|
48 | # General information about the project. | |
49 | project = 'Kallithea' |
|
49 | project = 'Kallithea' | |
50 | copyright = '2010-2020 by various authors, licensed as GPLv3.' |
|
50 | copyright = '2010-2020 by various authors, licensed as GPLv3.' | |
51 |
|
51 | |||
52 | # The version info for the project you're documenting, acts as replacement for |
|
52 | # The version info for the project you're documenting, acts as replacement for | |
53 | # |version| and |release|, also used in various other places throughout the |
|
53 | # |version| and |release|, also used in various other places throughout the | |
54 | # built documents. |
|
54 | # built documents. | |
55 | # |
|
55 | # | |
56 | # The short X.Y version. |
|
56 | # The short X.Y version. | |
57 | root = os.path.dirname(os.path.dirname(__file__)) |
|
57 | root = os.path.dirname(os.path.dirname(__file__)) | |
58 | sys.path.append(root) |
|
58 | sys.path.append(root) | |
59 | version = __version__ |
|
59 | version = kallithea.__version__ | |
60 | # The full version, including alpha/beta/rc tags. |
|
60 | # The full version, including alpha/beta/rc tags. | |
61 | release = __version__ |
|
61 | release = kallithea.__version__ | |
62 |
|
62 | |||
63 | # The language for content autogenerated by Sphinx. Refer to documentation |
|
63 | # The language for content autogenerated by Sphinx. Refer to documentation | |
64 | # for a list of supported languages. |
|
64 | # for a list of supported languages. | |
65 | #language = None |
|
65 | #language = None | |
66 |
|
66 | |||
67 | # There are two options for replacing |today|: either, you set today to some |
|
67 | # There are two options for replacing |today|: either, you set today to some | |
68 | # non-false value, then it is used: |
|
68 | # non-false value, then it is used: | |
69 | #today = '' |
|
69 | #today = '' | |
70 | # Else, today_fmt is used as the format for a strftime call. |
|
70 | # Else, today_fmt is used as the format for a strftime call. | |
71 | #today_fmt = '%B %d, %Y' |
|
71 | #today_fmt = '%B %d, %Y' | |
72 |
|
72 | |||
73 | # List of patterns, relative to source directory, that match files and |
|
73 | # List of patterns, relative to source directory, that match files and | |
74 | # directories to ignore when looking for source files. |
|
74 | # directories to ignore when looking for source files. | |
75 | exclude_patterns = ['_build'] |
|
75 | exclude_patterns = ['_build'] | |
76 |
|
76 | |||
77 | # The reST default role (used for this markup: `text`) to use for all documents. |
|
77 | # The reST default role (used for this markup: `text`) to use for all documents. | |
78 | #default_role = None |
|
78 | #default_role = None | |
79 |
|
79 | |||
80 | # If true, '()' will be appended to :func: etc. cross-reference text. |
|
80 | # If true, '()' will be appended to :func: etc. cross-reference text. | |
81 | #add_function_parentheses = True |
|
81 | #add_function_parentheses = True | |
82 |
|
82 | |||
83 | # If true, the current module name will be prepended to all description |
|
83 | # If true, the current module name will be prepended to all description | |
84 | # unit titles (such as .. function::). |
|
84 | # unit titles (such as .. function::). | |
85 | #add_module_names = True |
|
85 | #add_module_names = True | |
86 |
|
86 | |||
87 | # If true, sectionauthor and moduleauthor directives will be shown in the |
|
87 | # If true, sectionauthor and moduleauthor directives will be shown in the | |
88 | # output. They are ignored by default. |
|
88 | # output. They are ignored by default. | |
89 | #show_authors = False |
|
89 | #show_authors = False | |
90 |
|
90 | |||
91 | # The name of the Pygments (syntax highlighting) style to use. |
|
91 | # The name of the Pygments (syntax highlighting) style to use. | |
92 | pygments_style = 'sphinx' |
|
92 | pygments_style = 'sphinx' | |
93 | highlight_language = 'none' |
|
93 | highlight_language = 'none' | |
94 |
|
94 | |||
95 | # A list of ignored prefixes for module index sorting. |
|
95 | # A list of ignored prefixes for module index sorting. | |
96 | #modindex_common_prefix = [] |
|
96 | #modindex_common_prefix = [] | |
97 |
|
97 | |||
98 |
|
98 | |||
99 | # -- Options for HTML output --------------------------------------------------- |
|
99 | # -- Options for HTML output --------------------------------------------------- | |
100 |
|
100 | |||
101 | # The theme to use for HTML and HTML Help pages. See the documentation for |
|
101 | # The theme to use for HTML and HTML Help pages. See the documentation for | |
102 | # a list of builtin themes. |
|
102 | # a list of builtin themes. | |
103 | html_theme = 'nature' |
|
103 | html_theme = 'nature' | |
104 |
|
104 | |||
105 | # Theme options are theme-specific and customize the look and feel of a theme |
|
105 | # Theme options are theme-specific and customize the look and feel of a theme | |
106 | # further. For a list of options available for each theme, see the |
|
106 | # further. For a list of options available for each theme, see the | |
107 | # documentation. |
|
107 | # documentation. | |
108 | #html_theme_options = {} |
|
108 | #html_theme_options = {} | |
109 |
|
109 | |||
110 | # Add any paths that contain custom themes here, relative to this directory. |
|
110 | # Add any paths that contain custom themes here, relative to this directory. | |
111 | html_theme_path = ['theme'] |
|
111 | html_theme_path = ['theme'] | |
112 |
|
112 | |||
113 | # The name for this set of Sphinx documents. If None, it defaults to |
|
113 | # The name for this set of Sphinx documents. If None, it defaults to | |
114 | # "<project> v<release> documentation". |
|
114 | # "<project> v<release> documentation". | |
115 | #html_title = None |
|
115 | #html_title = None | |
116 |
|
116 | |||
117 | # A shorter title for the navigation bar. Default is the same as html_title. |
|
117 | # A shorter title for the navigation bar. Default is the same as html_title. | |
118 | #html_short_title = None |
|
118 | #html_short_title = None | |
119 |
|
119 | |||
120 | # The name of an image file (relative to this directory) to place at the top |
|
120 | # The name of an image file (relative to this directory) to place at the top | |
121 | # of the sidebar. |
|
121 | # of the sidebar. | |
122 | #html_logo = None |
|
122 | #html_logo = None | |
123 |
|
123 | |||
124 | # The name of an image file (within the static path) to use as favicon of the |
|
124 | # The name of an image file (within the static path) to use as favicon of the | |
125 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 |
|
125 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 | |
126 | # pixels large. |
|
126 | # pixels large. | |
127 | #html_favicon = None |
|
127 | #html_favicon = None | |
128 |
|
128 | |||
129 | # Add any paths that contain custom static files (such as style sheets) here, |
|
129 | # Add any paths that contain custom static files (such as style sheets) here, | |
130 | # relative to this directory. They are copied after the builtin static files, |
|
130 | # relative to this directory. They are copied after the builtin static files, | |
131 | # so a file named "default.css" will overwrite the builtin "default.css". |
|
131 | # so a file named "default.css" will overwrite the builtin "default.css". | |
132 | #html_static_path = ['_static'] |
|
132 | #html_static_path = ['_static'] | |
133 |
|
133 | |||
134 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, |
|
134 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | |
135 | # using the given strftime format. |
|
135 | # using the given strftime format. | |
136 | #html_last_updated_fmt = '%b %d, %Y' |
|
136 | #html_last_updated_fmt = '%b %d, %Y' | |
137 |
|
137 | |||
138 | # If true, SmartyPants will be used to convert quotes and dashes to |
|
138 | # If true, SmartyPants will be used to convert quotes and dashes to | |
139 | # typographically correct entities. |
|
139 | # typographically correct entities. | |
140 | #html_use_smartypants = True |
|
140 | #html_use_smartypants = True | |
141 |
|
141 | |||
142 | # Custom sidebar templates, maps document names to template names. |
|
142 | # Custom sidebar templates, maps document names to template names. | |
143 | #html_sidebars = {} |
|
143 | #html_sidebars = {} | |
144 |
|
144 | |||
145 | # Additional templates that should be rendered to pages, maps page names to |
|
145 | # Additional templates that should be rendered to pages, maps page names to | |
146 | # template names. |
|
146 | # template names. | |
147 | #html_additional_pages = {} |
|
147 | #html_additional_pages = {} | |
148 |
|
148 | |||
149 | # If false, no module index is generated. |
|
149 | # If false, no module index is generated. | |
150 | #html_domain_indices = True |
|
150 | #html_domain_indices = True | |
151 |
|
151 | |||
152 | # If false, no index is generated. |
|
152 | # If false, no index is generated. | |
153 | #html_use_index = True |
|
153 | #html_use_index = True | |
154 |
|
154 | |||
155 | # If true, the index is split into individual pages for each letter. |
|
155 | # If true, the index is split into individual pages for each letter. | |
156 | #html_split_index = False |
|
156 | #html_split_index = False | |
157 |
|
157 | |||
158 | # If true, links to the reST sources are added to the pages. |
|
158 | # If true, links to the reST sources are added to the pages. | |
159 | #html_show_sourcelink = True |
|
159 | #html_show_sourcelink = True | |
160 |
|
160 | |||
161 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. |
|
161 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. | |
162 | #html_show_sphinx = True |
|
162 | #html_show_sphinx = True | |
163 |
|
163 | |||
164 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. |
|
164 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. | |
165 | #html_show_copyright = True |
|
165 | #html_show_copyright = True | |
166 |
|
166 | |||
167 | # If true, an OpenSearch description file will be output, and all pages will |
|
167 | # If true, an OpenSearch description file will be output, and all pages will | |
168 | # contain a <link> tag referring to it. The value of this option must be the |
|
168 | # contain a <link> tag referring to it. The value of this option must be the | |
169 | # base URL from which the finished HTML is served. |
|
169 | # base URL from which the finished HTML is served. | |
170 | #html_use_opensearch = '' |
|
170 | #html_use_opensearch = '' | |
171 |
|
171 | |||
172 | # This is the file name suffix for HTML files (e.g. ".xhtml"). |
|
172 | # This is the file name suffix for HTML files (e.g. ".xhtml"). | |
173 | #html_file_suffix = None |
|
173 | #html_file_suffix = None | |
174 |
|
174 | |||
175 | # Output file base name for HTML help builder. |
|
175 | # Output file base name for HTML help builder. | |
176 | htmlhelp_basename = 'Kallithea-docs' |
|
176 | htmlhelp_basename = 'Kallithea-docs' | |
177 |
|
177 | |||
178 |
|
178 | |||
179 | # -- Options for LaTeX output -------------------------------------------------- |
|
179 | # -- Options for LaTeX output -------------------------------------------------- | |
180 |
|
180 | |||
181 | # The paper size ('letter' or 'a4'). |
|
181 | # The paper size ('letter' or 'a4'). | |
182 | #latex_paper_size = 'letter' |
|
182 | #latex_paper_size = 'letter' | |
183 |
|
183 | |||
184 | # The font size ('10pt', '11pt' or '12pt'). |
|
184 | # The font size ('10pt', '11pt' or '12pt'). | |
185 | #latex_font_size = '10pt' |
|
185 | #latex_font_size = '10pt' | |
186 |
|
186 | |||
187 | # Grouping the document tree into LaTeX files. List of tuples |
|
187 | # Grouping the document tree into LaTeX files. List of tuples | |
188 | # (source start file, target name, title, author, documentclass [howto/manual]). |
|
188 | # (source start file, target name, title, author, documentclass [howto/manual]). | |
189 | latex_documents = [ |
|
189 | latex_documents = [ | |
190 | ('index', 'Kallithea.tex', 'Kallithea Documentation', |
|
190 | ('index', 'Kallithea.tex', 'Kallithea Documentation', | |
191 | 'Kallithea Developers', 'manual'), |
|
191 | 'Kallithea Developers', 'manual'), | |
192 | ] |
|
192 | ] | |
193 |
|
193 | |||
194 | # The name of an image file (relative to this directory) to place at the top of |
|
194 | # The name of an image file (relative to this directory) to place at the top of | |
195 | # the title page. |
|
195 | # the title page. | |
196 | #latex_logo = None |
|
196 | #latex_logo = None | |
197 |
|
197 | |||
198 | # For "manual" documents, if this is true, then toplevel headings are parts, |
|
198 | # For "manual" documents, if this is true, then toplevel headings are parts, | |
199 | # not chapters. |
|
199 | # not chapters. | |
200 | #latex_use_parts = False |
|
200 | #latex_use_parts = False | |
201 |
|
201 | |||
202 | # If true, show page references after internal links. |
|
202 | # If true, show page references after internal links. | |
203 | #latex_show_pagerefs = False |
|
203 | #latex_show_pagerefs = False | |
204 |
|
204 | |||
205 | # If true, show URL addresses after external links. |
|
205 | # If true, show URL addresses after external links. | |
206 | #latex_show_urls = False |
|
206 | #latex_show_urls = False | |
207 |
|
207 | |||
208 | # Additional stuff for the LaTeX preamble. |
|
208 | # Additional stuff for the LaTeX preamble. | |
209 | #latex_preamble = '' |
|
209 | #latex_preamble = '' | |
210 |
|
210 | |||
211 | # Documents to append as an appendix to all manuals. |
|
211 | # Documents to append as an appendix to all manuals. | |
212 | #latex_appendices = [] |
|
212 | #latex_appendices = [] | |
213 |
|
213 | |||
214 | # If false, no module index is generated. |
|
214 | # If false, no module index is generated. | |
215 | #latex_domain_indices = True |
|
215 | #latex_domain_indices = True | |
216 |
|
216 | |||
217 |
|
217 | |||
218 | # -- Options for manual page output -------------------------------------------- |
|
218 | # -- Options for manual page output -------------------------------------------- | |
219 |
|
219 | |||
220 | # One entry per manual page. List of tuples |
|
220 | # One entry per manual page. List of tuples | |
221 | # (source start file, name, description, authors, manual section). |
|
221 | # (source start file, name, description, authors, manual section). | |
222 | man_pages = [ |
|
222 | man_pages = [ | |
223 | ('index', 'kallithea', 'Kallithea Documentation', |
|
223 | ('index', 'kallithea', 'Kallithea Documentation', | |
224 | ['Kallithea Developers'], 1) |
|
224 | ['Kallithea Developers'], 1) | |
225 | ] |
|
225 | ] | |
226 |
|
226 | |||
227 |
|
227 | |||
228 | # Example configuration for intersphinx: refer to the Python standard library. |
|
228 | # Example configuration for intersphinx: refer to the Python standard library. | |
229 | intersphinx_mapping = {'http://docs.python.org/': None} |
|
229 | intersphinx_mapping = {'http://docs.python.org/': None} |
@@ -1,649 +1,649 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 |
|
14 | |||
15 | """ |
|
15 | """ | |
16 | kallithea.lib.base |
|
16 | kallithea.lib.base | |
17 | ~~~~~~~~~~~~~~~~~~ |
|
17 | ~~~~~~~~~~~~~~~~~~ | |
18 |
|
18 | |||
19 | The base Controller API |
|
19 | The base Controller API | |
20 | Provides the BaseController class for subclassing. And usage in different |
|
20 | Provides the BaseController class for subclassing. And usage in different | |
21 | controllers |
|
21 | controllers | |
22 |
|
22 | |||
23 | This file was forked by the Kallithea project in July 2014. |
|
23 | This file was forked by the Kallithea project in July 2014. | |
24 | Original author and date, and relevant copyright and licensing information is below: |
|
24 | Original author and date, and relevant copyright and licensing information is below: | |
25 | :created_on: Oct 06, 2010 |
|
25 | :created_on: Oct 06, 2010 | |
26 | :author: marcink |
|
26 | :author: marcink | |
27 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
27 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
28 | :license: GPLv3, see LICENSE.md for more details. |
|
28 | :license: GPLv3, see LICENSE.md for more details. | |
29 | """ |
|
29 | """ | |
30 |
|
30 | |||
31 | import base64 |
|
31 | import base64 | |
32 | import datetime |
|
32 | import datetime | |
33 | import logging |
|
33 | import logging | |
34 | import traceback |
|
34 | import traceback | |
35 | import warnings |
|
35 | import warnings | |
36 |
|
36 | |||
37 | import decorator |
|
37 | import decorator | |
38 | import paste.auth.basic |
|
38 | import paste.auth.basic | |
39 | import paste.httpexceptions |
|
39 | import paste.httpexceptions | |
40 | import paste.httpheaders |
|
40 | import paste.httpheaders | |
41 | import webob.exc |
|
41 | import webob.exc | |
42 | from tg import TGController, config, render_template, request, response, session |
|
42 | from tg import TGController, config, render_template, request, response, session | |
43 | from tg import tmpl_context as c |
|
43 | from tg import tmpl_context as c | |
44 | from tg.i18n import ugettext as _ |
|
44 | from tg.i18n import ugettext as _ | |
45 |
|
45 | |||
46 | from kallithea import BACKENDS, __version__ |
|
46 | import kallithea | |
47 | from kallithea.config.routing import url |
|
47 | from kallithea.config.routing import url | |
48 | from kallithea.lib import auth_modules, ext_json |
|
48 | from kallithea.lib import auth_modules, ext_json | |
49 | from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware |
|
49 | from kallithea.lib.auth import AuthUser, HasPermissionAnyMiddleware | |
50 | from kallithea.lib.exceptions import UserCreationError |
|
50 | from kallithea.lib.exceptions import UserCreationError | |
51 | from kallithea.lib.utils import get_repo_slug, is_valid_repo |
|
51 | from kallithea.lib.utils import get_repo_slug, is_valid_repo | |
52 | from kallithea.lib.utils2 import AttributeDict, asbool, ascii_bytes, safe_int, safe_str, set_hook_environment |
|
52 | from kallithea.lib.utils2 import AttributeDict, asbool, ascii_bytes, safe_int, safe_str, set_hook_environment | |
53 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError |
|
53 | from kallithea.lib.vcs.exceptions import ChangesetDoesNotExistError, EmptyRepositoryError, RepositoryError | |
54 | from kallithea.model import meta |
|
54 | from kallithea.model import meta | |
55 | from kallithea.model.db import PullRequest, Repository, Setting, User |
|
55 | from kallithea.model.db import PullRequest, Repository, Setting, User | |
56 | from kallithea.model.scm import ScmModel |
|
56 | from kallithea.model.scm import ScmModel | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | log = logging.getLogger(__name__) |
|
59 | log = logging.getLogger(__name__) | |
60 |
|
60 | |||
61 |
|
61 | |||
62 | def render(template_path): |
|
62 | def render(template_path): | |
63 | return render_template({'url': url}, 'mako', template_path) |
|
63 | return render_template({'url': url}, 'mako', template_path) | |
64 |
|
64 | |||
65 |
|
65 | |||
66 | def _filter_proxy(ip): |
|
66 | def _filter_proxy(ip): | |
67 | """ |
|
67 | """ | |
68 | HEADERS can have multiple ips inside the left-most being the original |
|
68 | HEADERS can have multiple ips inside the left-most being the original | |
69 | client, and each successive proxy that passed the request adding the IP |
|
69 | client, and each successive proxy that passed the request adding the IP | |
70 | address where it received the request from. |
|
70 | address where it received the request from. | |
71 |
|
71 | |||
72 | :param ip: |
|
72 | :param ip: | |
73 | """ |
|
73 | """ | |
74 | if ',' in ip: |
|
74 | if ',' in ip: | |
75 | _ips = ip.split(',') |
|
75 | _ips = ip.split(',') | |
76 | _first_ip = _ips[0].strip() |
|
76 | _first_ip = _ips[0].strip() | |
77 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) |
|
77 | log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip) | |
78 | return _first_ip |
|
78 | return _first_ip | |
79 | return ip |
|
79 | return ip | |
80 |
|
80 | |||
81 |
|
81 | |||
82 | def _get_ip_addr(environ): |
|
82 | def _get_ip_addr(environ): | |
83 | proxy_key = 'HTTP_X_REAL_IP' |
|
83 | proxy_key = 'HTTP_X_REAL_IP' | |
84 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' |
|
84 | proxy_key2 = 'HTTP_X_FORWARDED_FOR' | |
85 | def_key = 'REMOTE_ADDR' |
|
85 | def_key = 'REMOTE_ADDR' | |
86 |
|
86 | |||
87 | ip = environ.get(proxy_key) |
|
87 | ip = environ.get(proxy_key) | |
88 | if ip: |
|
88 | if ip: | |
89 | return _filter_proxy(ip) |
|
89 | return _filter_proxy(ip) | |
90 |
|
90 | |||
91 | ip = environ.get(proxy_key2) |
|
91 | ip = environ.get(proxy_key2) | |
92 | if ip: |
|
92 | if ip: | |
93 | return _filter_proxy(ip) |
|
93 | return _filter_proxy(ip) | |
94 |
|
94 | |||
95 | ip = environ.get(def_key, '0.0.0.0') |
|
95 | ip = environ.get(def_key, '0.0.0.0') | |
96 | return _filter_proxy(ip) |
|
96 | return _filter_proxy(ip) | |
97 |
|
97 | |||
98 |
|
98 | |||
99 | def get_path_info(environ): |
|
99 | def get_path_info(environ): | |
100 | """Return PATH_INFO from environ ... using tg.original_request if available. |
|
100 | """Return PATH_INFO from environ ... using tg.original_request if available. | |
101 |
|
101 | |||
102 | In Python 3 WSGI, PATH_INFO is a unicode str, but kind of contains encoded |
|
102 | In Python 3 WSGI, PATH_INFO is a unicode str, but kind of contains encoded | |
103 | bytes. The code points are guaranteed to only use the lower 8 bit bits, and |
|
103 | bytes. The code points are guaranteed to only use the lower 8 bit bits, and | |
104 | encoding the string with the 1:1 encoding latin1 will give the |
|
104 | encoding the string with the 1:1 encoding latin1 will give the | |
105 | corresponding byte string ... which then can be decoded to proper unicode. |
|
105 | corresponding byte string ... which then can be decoded to proper unicode. | |
106 | """ |
|
106 | """ | |
107 | org_req = environ.get('tg.original_request') |
|
107 | org_req = environ.get('tg.original_request') | |
108 | if org_req is not None: |
|
108 | if org_req is not None: | |
109 | environ = org_req.environ |
|
109 | environ = org_req.environ | |
110 | return safe_str(environ['PATH_INFO'].encode('latin1')) |
|
110 | return safe_str(environ['PATH_INFO'].encode('latin1')) | |
111 |
|
111 | |||
112 |
|
112 | |||
113 | def log_in_user(user, remember, is_external_auth, ip_addr): |
|
113 | def log_in_user(user, remember, is_external_auth, ip_addr): | |
114 | """ |
|
114 | """ | |
115 | Log a `User` in and update session and cookies. If `remember` is True, |
|
115 | Log a `User` in and update session and cookies. If `remember` is True, | |
116 | the session cookie is set to expire in a year; otherwise, it expires at |
|
116 | the session cookie is set to expire in a year; otherwise, it expires at | |
117 | the end of the browser session. |
|
117 | the end of the browser session. | |
118 |
|
118 | |||
119 | Returns populated `AuthUser` object. |
|
119 | Returns populated `AuthUser` object. | |
120 | """ |
|
120 | """ | |
121 | # It should not be possible to explicitly log in as the default user. |
|
121 | # It should not be possible to explicitly log in as the default user. | |
122 | assert not user.is_default_user, user |
|
122 | assert not user.is_default_user, user | |
123 |
|
123 | |||
124 | auth_user = AuthUser.make(dbuser=user, is_external_auth=is_external_auth, ip_addr=ip_addr) |
|
124 | auth_user = AuthUser.make(dbuser=user, is_external_auth=is_external_auth, ip_addr=ip_addr) | |
125 | if auth_user is None: |
|
125 | if auth_user is None: | |
126 | return None |
|
126 | return None | |
127 |
|
127 | |||
128 | user.update_lastlogin() |
|
128 | user.update_lastlogin() | |
129 | meta.Session().commit() |
|
129 | meta.Session().commit() | |
130 |
|
130 | |||
131 | # Start new session to prevent session fixation attacks. |
|
131 | # Start new session to prevent session fixation attacks. | |
132 | session.invalidate() |
|
132 | session.invalidate() | |
133 | session['authuser'] = cookie = auth_user.to_cookie() |
|
133 | session['authuser'] = cookie = auth_user.to_cookie() | |
134 |
|
134 | |||
135 | # If they want to be remembered, update the cookie. |
|
135 | # If they want to be remembered, update the cookie. | |
136 | # NOTE: Assumes that beaker defaults to browser session cookie. |
|
136 | # NOTE: Assumes that beaker defaults to browser session cookie. | |
137 | if remember: |
|
137 | if remember: | |
138 | t = datetime.datetime.now() + datetime.timedelta(days=365) |
|
138 | t = datetime.datetime.now() + datetime.timedelta(days=365) | |
139 | session._set_cookie_expires(t) |
|
139 | session._set_cookie_expires(t) | |
140 |
|
140 | |||
141 | session.save() |
|
141 | session.save() | |
142 |
|
142 | |||
143 | log.info('user %s is now authenticated and stored in ' |
|
143 | log.info('user %s is now authenticated and stored in ' | |
144 | 'session, session attrs %s', user.username, cookie) |
|
144 | 'session, session attrs %s', user.username, cookie) | |
145 |
|
145 | |||
146 | # dumps session attrs back to cookie |
|
146 | # dumps session attrs back to cookie | |
147 | session._update_cookie_out() |
|
147 | session._update_cookie_out() | |
148 |
|
148 | |||
149 | return auth_user |
|
149 | return auth_user | |
150 |
|
150 | |||
151 |
|
151 | |||
class BasicAuth(paste.auth.basic.AuthBasicAuthenticator):
    """HTTP basic authentication for VCS requests.

    Extends the Paste basic-auth authenticator with an optional
    alternative failure status code (403 instead of 401), as configured
    via Kallithea's 'auth_ret_code' setting.
    """

    def __init__(self, realm, authfunc, auth_http_code=None):
        self.realm = realm
        # authfunc(username, password, environ) returns non-None on success
        self.authfunc = authfunc
        # optional alternative HTTP status code (the string '403') returned
        # on authentication failure instead of the default 401
        self._rc_auth_http_code = auth_http_code

    def build_authentication(self, environ):
        """Return a WSGI response app that asks the client to authenticate."""
        head = paste.httpheaders.WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
        # Consume the whole body before sending a response, so the client
        # does not stall with an unread request payload
        try:
            request_body_size = int(environ.get('CONTENT_LENGTH', 0))
        except ValueError:
            request_body_size = 0
        environ['wsgi.input'].read(request_body_size)
        if self._rc_auth_http_code == '403':
            # return 403 if alternative http return code is specified in
            # Kallithea config
            return paste.httpexceptions.HTTPForbidden(headers=head)
        return paste.httpexceptions.HTTPUnauthorized(headers=head)

    def authenticate(self, environ):
        """Authenticate the request from its Authorization header.

        Returns the username (str) on success, or a WSGI response app
        (the authentication challenge) on failure.
        """
        authorization = paste.httpheaders.AUTHORIZATION(environ)
        if not authorization:
            return self.build_authentication(environ)
        (authmeth, auth) = authorization.split(' ', 1)
        if 'basic' != authmeth.lower():
            return self.build_authentication(environ)
        try:
            # The header value is untrusted client input: malformed base64
            # raises binascii.Error (a ValueError subclass) - treat it as
            # a failed authentication attempt rather than crashing with 500
            auth = safe_str(base64.b64decode(auth.strip()))
        except ValueError:
            return self.build_authentication(environ)
        _parts = auth.split(':', 1)
        if len(_parts) == 2:
            username, password = _parts
            if self.authfunc(username, password, environ) is not None:
                return username
        return self.build_authentication(environ)

    __call__ = authenticate
189 |
|
189 | |||
190 |
|
190 | |||
class BaseVCSController(object):
    """Base controller for handling Mercurial/Git protocol requests
    (coming from a VCS client, and not a browser).
    """

    # short name of the VCS handled by the subclass
    scm_alias = None # 'hg' / 'git'

    def __init__(self, application, config):
        # the wrapped WSGI application; requests not recognized as VCS
        # protocol requests fall through to it (see __call__)
        self.application = application
        self.config = config
        # base path of repo locations
        self.basepath = self.config['base_path']
        # authenticate this VCS request using the authentication modules
        self.authenticate = BasicAuth('', auth_modules.authenticate,
                                      config.get('auth_ret_code'))

    @classmethod
    def parse_request(cls, environ):
        """If request is parsed as a request for this VCS, return a namespace with the parsed request.
        If the request is unknown, return None.
        """
        raise NotImplementedError()

    def _authorize(self, environ, action, repo_name, ip_addr):
        """Authenticate and authorize user.

        Since we're dealing with a VCS client and not a browser, we only
        support HTTP basic authentication, either directly via raw header
        inspection, or by using container authentication to delegate the
        authentication to the web server.

        Returns (user, None) on successful authentication and authorization.
        Returns (None, wsgi_app) to send the wsgi_app response to the client.
        """
        # Use anonymous access if allowed for action on repo.
        default_user = User.get_default_user()
        default_authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
        if default_authuser is None:
            log.debug('No anonymous access at all') # move on to proper user auth
        else:
            if self._check_permission(action, default_authuser, repo_name):
                return default_authuser, None
            log.debug('Not authorized to access this repository as anonymous user')

        username = None
        #==============================================================
        # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
        # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
        #==============================================================

        # try to auth based on environ, container auth methods
        log.debug('Running PRE-AUTH for container based authentication')
        pre_auth = auth_modules.authenticate('', '', environ)
        if pre_auth is not None and pre_auth.get('username'):
            username = pre_auth['username']
            log.debug('PRE-AUTH got %s as username', username)

        # If not authenticated by the container, running basic auth
        if not username:
            self.authenticate.realm = self.config['realm']
            result = self.authenticate(environ)
            if isinstance(result, str):
                # a plain string result is the authenticated username;
                # expose it to downstream WSGI components the standard way
                paste.httpheaders.AUTH_TYPE.update(environ, 'basic')
                paste.httpheaders.REMOTE_USER.update(environ, result)
                username = result
            else:
                # otherwise the result is a challenge/failure response
                return None, result.wsgi_application

        #==============================================================
        # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
        #==============================================================
        try:
            user = User.get_by_username_or_email(username)
        except Exception:
            log.error(traceback.format_exc())
            return None, webob.exc.HTTPInternalServerError()

        authuser = AuthUser.make(dbuser=user, ip_addr=ip_addr)
        if authuser is None:
            return None, webob.exc.HTTPForbidden()
        if not self._check_permission(action, authuser, repo_name):
            return None, webob.exc.HTTPForbidden()

        return user, None

    def _handle_request(self, environ, start_response):
        # implemented by the hg/git subclasses
        raise NotImplementedError()

    def _check_permission(self, action, authuser, repo_name):
        """Return True if `authuser` may perform `action` on `repo_name`.

        :param action: 'push' or 'pull'
        :param user: `AuthUser` instance
        :param repo_name: repository name
        """
        if action == 'push':
            if not HasPermissionAnyMiddleware('repository.write',
                                              'repository.admin')(authuser,
                                                                  repo_name):
                return False

        elif action == 'pull':
            #any other action need at least read permission
            if not HasPermissionAnyMiddleware('repository.read',
                                              'repository.write',
                                              'repository.admin')(authuser,
                                                                  repo_name):
                return False

        else:
            # parse_request should only yield 'push'/'pull' (or None,
            # rejected before we get here) - anything else is a coding error
            assert False, action

        return True

    def _get_ip_addr(self, environ):
        return _get_ip_addr(environ)

    def __call__(self, environ, start_response):
        try:
            # try parsing a request for this VCS - if it fails, call the wrapped app
            parsed_request = self.parse_request(environ)
            if parsed_request is None:
                return self.application(environ, start_response)

            # skip passing error to error controller
            environ['pylons.status_code_redirect'] = True

            # quick check if repo exists...
            if not is_valid_repo(parsed_request.repo_name, self.basepath, self.scm_alias):
                raise webob.exc.HTTPNotFound()

            if parsed_request.action is None:
                # Note: the client doesn't get the helpful error message
                raise webob.exc.HTTPBadRequest('Unable to detect pull/push action for %r! Are you using a nonstandard command or client?' % parsed_request.repo_name)

            #======================================================================
            # CHECK PERMISSIONS
            #======================================================================
            ip_addr = self._get_ip_addr(environ)
            user, response_app = self._authorize(environ, parsed_request.action, parsed_request.repo_name, ip_addr)
            if response_app is not None:
                return response_app(environ, start_response)

            #======================================================================
            # REQUEST HANDLING
            #======================================================================
            set_hook_environment(user.username, ip_addr,
                parsed_request.repo_name, self.scm_alias, parsed_request.action)

            try:
                log.info('%s action on %s repo "%s" by "%s" from %s',
                         parsed_request.action, self.scm_alias, parsed_request.repo_name, user.username, ip_addr)
                app = self._make_app(parsed_request)
                return app(environ, start_response)
            except Exception:
                log.error(traceback.format_exc())
                raise webob.exc.HTTPInternalServerError()

        except webob.exc.HTTPException as e:
            # HTTP exceptions double as WSGI response applications
            return e(environ, start_response)
350 |
|
350 | |||
351 |
|
351 | |||
class BaseController(TGController):
    """Base class for Kallithea's browser-facing controllers.

    Handles request authentication (session cookie, container auth, or
    API key/bearer token), CSRF gating, and populates the template
    context `c` with global configuration values.
    """

    def _before(self, *args, **kwargs):
        """
        _before is called before controller methods and after __call__
        """
        if request.needs_csrf_check:
            # CSRF protection: Whenever a request has ambient authority (whether
            # through a session cookie or its origin IP address), it must include
            # the correct token, unless the HTTP method is GET or HEAD (and thus
            # guaranteed to be side effect free. In practice, the only situation
            # where we allow side effects without ambient authority is when the
            # authority comes from an API key; and that is handled above.
            from kallithea.lib import helpers as h
            token = request.POST.get(h.session_csrf_secret_name)
            if not token or token != h.session_csrf_secret_token():
                log.error('CSRF check failed')
                raise webob.exc.HTTPForbidden()

        c.kallithea_version = kallithea.__version__
        rc_config = Setting.get_app_settings()

        # Visual options
        c.visual = AttributeDict({})

        ## DB stored
        c.visual.show_public_icon = asbool(rc_config.get('show_public_icon'))
        c.visual.show_private_icon = asbool(rc_config.get('show_private_icon'))
        c.visual.stylify_metalabels = asbool(rc_config.get('stylify_metalabels'))
        c.visual.page_size = safe_int(rc_config.get('dashboard_items', 100))
        c.visual.admin_grid_items = safe_int(rc_config.get('admin_grid_items', 100))
        c.visual.repository_fields = asbool(rc_config.get('repository_fields'))
        c.visual.show_version = asbool(rc_config.get('show_version'))
        c.visual.use_gravatar = asbool(rc_config.get('use_gravatar'))
        c.visual.gravatar_url = rc_config.get('gravatar_url')

        c.ga_code = rc_config.get('ga_code')
        # TODO: replace undocumented backwards compatibility hack with db upgrade and rename ga_code
        if c.ga_code and '<' not in c.ga_code:
            c.ga_code = '''<script type="text/javascript">
                var _gaq = _gaq || [];
                _gaq.push(['_setAccount', '%s']);
                _gaq.push(['_trackPageview']);

                (function() {
                    var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
                    ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
                    var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
                })();
                </script>''' % c.ga_code
        c.site_name = rc_config.get('title')
        c.clone_uri_tmpl = rc_config.get('clone_uri_tmpl') or Repository.DEFAULT_CLONE_URI
        c.clone_ssh_tmpl = rc_config.get('clone_ssh_tmpl') or Repository.DEFAULT_CLONE_SSH

        ## INI stored
        c.visual.allow_repo_location_change = asbool(config.get('allow_repo_location_change', True))
        c.visual.allow_custom_hooks_settings = asbool(config.get('allow_custom_hooks_settings', True))
        c.ssh_enabled = asbool(config.get('ssh_enabled', False))

        c.instance_id = config.get('instance_id')
        c.issues_url = config.get('bugtracker', url('issues_url'))
        # END CONFIG VARS

        c.repo_name = get_repo_slug(request) # can be empty
        c.backends = list(kallithea.BACKENDS)

        self.cut_off_limit = safe_int(config.get('cut_off_limit'))

        c.my_pr_count = PullRequest.query(reviewer_id=request.authuser.user_id, include_closed=False).count()

        self.scm_model = ScmModel()

    @staticmethod
    def _determine_auth_user(session_authuser, ip_addr):
        """
        Create an `AuthUser` object given the API key/bearer token
        (if any) and the value of the authuser session cookie.
        Returns None if no valid user is found (like not active or no access for IP).
        """

        # Authenticate by session cookie
        # In ancient login sessions, 'authuser' may not be a dict.
        # In that case, the user will have to log in again.
        # v0.3 and earlier included an 'is_authenticated' key; if present,
        # this must be True.
        if isinstance(session_authuser, dict) and session_authuser.get('is_authenticated', True):
            return AuthUser.from_cookie(session_authuser, ip_addr=ip_addr)

        # Authenticate by auth_container plugin (if enabled)
        if any(
                plugin.is_container_auth
                for plugin in auth_modules.get_auth_plugins()
        ):
            try:
                user_info = auth_modules.authenticate('', '', request.environ)
            except UserCreationError as e:
                from kallithea.lib import helpers as h
                h.flash(e, 'error', logf=log.error)
            else:
                if user_info is not None:
                    username = user_info['username']
                    user = User.get_by_username(username, case_insensitive=True)
                    return log_in_user(user, remember=False, is_external_auth=True, ip_addr=ip_addr)

        # User is default user (if active) or anonymous
        default_user = User.get_default_user()
        authuser = AuthUser.make(dbuser=default_user, ip_addr=ip_addr)
        if authuser is None: # fall back to anonymous
            authuser = AuthUser(dbuser=default_user) # TODO: somehow use .make?
        return authuser

    @staticmethod
    def _basic_security_checks():
        """Perform basic security/sanity checks before processing the request."""

        # Only allow the following HTTP request methods.
        if request.method not in ['GET', 'HEAD', 'POST']:
            raise webob.exc.HTTPMethodNotAllowed()

        # Also verify the _method override - no longer allowed.
        if request.params.get('_method') is None:
            pass # no override, no problem
        else:
            raise webob.exc.HTTPMethodNotAllowed()

        # Make sure CSRF token never appears in the URL. If so, invalidate it.
        from kallithea.lib import helpers as h
        if h.session_csrf_secret_name in request.GET:
            log.error('CSRF key leak detected')
            session.pop(h.session_csrf_secret_name, None)
            session.save()
            h.flash(_('CSRF token leak has been detected - all form tokens have been expired'),
                    category='error')

        # WebOb already ignores request payload parameters for anything other
        # than POST/PUT, but double-check since other Kallithea code relies on
        # this assumption.
        if request.method not in ['POST', 'PUT'] and request.POST:
            log.error('%r request with payload parameters; WebOb should have stopped this', request.method)
            raise webob.exc.HTTPBadRequest()

    def __call__(self, environ, context):
        try:
            ip_addr = _get_ip_addr(environ)
            self._basic_security_checks()

            api_key = request.GET.get('api_key')
            try:
                # Request.authorization may raise ValueError on invalid input
                type, params = request.authorization
            except (ValueError, TypeError):
                # TypeError: no Authorization header at all (None)
                pass
            else:
                if type.lower() == 'bearer':
                    api_key = params # bearer token is an api key too

            if api_key is None:
                # No API key: authenticate via session cookie or container auth
                authuser = self._determine_auth_user(
                    session.get('authuser'),
                    ip_addr=ip_addr,
                )
                needs_csrf_check = request.method not in ['GET', 'HEAD']

            else:
                dbuser = User.get_by_api_key(api_key)
                if dbuser is None:
                    log.info('No db user found for authentication with API key ****%s from %s',
                             api_key[-4:], ip_addr)
                # AuthUser.make returns None for an invalid/inactive user,
                # which is rejected below
                authuser = AuthUser.make(dbuser=dbuser, is_external_auth=True, ip_addr=ip_addr)
                needs_csrf_check = False # API key provides CSRF protection

            if authuser is None:
                log.info('No valid user found')
                raise webob.exc.HTTPForbidden()

            # set globals for auth user
            request.authuser = authuser
            request.ip_addr = ip_addr
            request.needs_csrf_check = needs_csrf_check

            log.info('IP: %s User: %s Request: %s',
                     request.ip_addr, request.authuser,
                     get_path_info(environ),
                     )
            return super(BaseController, self).__call__(environ, context)
        except webob.exc.HTTPException as e:
            return e
539 |
|
539 | |||
540 |
|
540 | |||
541 | class BaseRepoController(BaseController): |
|
541 | class BaseRepoController(BaseController): | |
542 | """ |
|
542 | """ | |
543 | Base class for controllers responsible for loading all needed data for |
|
543 | Base class for controllers responsible for loading all needed data for | |
544 | repository loaded items are |
|
544 | repository loaded items are | |
545 |
|
545 | |||
546 | c.db_repo_scm_instance: instance of scm repository |
|
546 | c.db_repo_scm_instance: instance of scm repository | |
547 | c.db_repo: instance of db |
|
547 | c.db_repo: instance of db | |
548 | c.repository_followers: number of followers |
|
548 | c.repository_followers: number of followers | |
549 | c.repository_forks: number of forks |
|
549 | c.repository_forks: number of forks | |
550 | c.repository_following: weather the current user is following the current repo |
|
550 | c.repository_following: weather the current user is following the current repo | |
551 | """ |
|
551 | """ | |
552 |
|
552 | |||
553 | def _before(self, *args, **kwargs): |
|
553 | def _before(self, *args, **kwargs): | |
554 | super(BaseRepoController, self)._before(*args, **kwargs) |
|
554 | super(BaseRepoController, self)._before(*args, **kwargs) | |
555 | if c.repo_name: # extracted from request by base-base BaseController._before |
|
555 | if c.repo_name: # extracted from request by base-base BaseController._before | |
556 | _dbr = Repository.get_by_repo_name(c.repo_name) |
|
556 | _dbr = Repository.get_by_repo_name(c.repo_name) | |
557 | if not _dbr: |
|
557 | if not _dbr: | |
558 | return |
|
558 | return | |
559 |
|
559 | |||
560 | log.debug('Found repository in database %s with state `%s`', |
|
560 | log.debug('Found repository in database %s with state `%s`', | |
561 | _dbr, _dbr.repo_state) |
|
561 | _dbr, _dbr.repo_state) | |
562 | route = getattr(request.environ.get('routes.route'), 'name', '') |
|
562 | route = getattr(request.environ.get('routes.route'), 'name', '') | |
563 |
|
563 | |||
564 | # allow to delete repos that are somehow damages in filesystem |
|
564 | # allow to delete repos that are somehow damages in filesystem | |
565 | if route in ['delete_repo']: |
|
565 | if route in ['delete_repo']: | |
566 | return |
|
566 | return | |
567 |
|
567 | |||
568 | if _dbr.repo_state in [Repository.STATE_PENDING]: |
|
568 | if _dbr.repo_state in [Repository.STATE_PENDING]: | |
569 | if route in ['repo_creating_home']: |
|
569 | if route in ['repo_creating_home']: | |
570 | return |
|
570 | return | |
571 | check_url = url('repo_creating_home', repo_name=c.repo_name) |
|
571 | check_url = url('repo_creating_home', repo_name=c.repo_name) | |
572 | raise webob.exc.HTTPFound(location=check_url) |
|
572 | raise webob.exc.HTTPFound(location=check_url) | |
573 |
|
573 | |||
574 | dbr = c.db_repo = _dbr |
|
574 | dbr = c.db_repo = _dbr | |
575 | c.db_repo_scm_instance = c.db_repo.scm_instance |
|
575 | c.db_repo_scm_instance = c.db_repo.scm_instance | |
576 | if c.db_repo_scm_instance is None: |
|
576 | if c.db_repo_scm_instance is None: | |
577 | log.error('%s this repository is present in database but it ' |
|
577 | log.error('%s this repository is present in database but it ' | |
578 | 'cannot be created as an scm instance', c.repo_name) |
|
578 | 'cannot be created as an scm instance', c.repo_name) | |
579 | from kallithea.lib import helpers as h |
|
579 | from kallithea.lib import helpers as h | |
580 | h.flash(_('Repository not found in the filesystem'), |
|
580 | h.flash(_('Repository not found in the filesystem'), | |
581 | category='error') |
|
581 | category='error') | |
582 | raise webob.exc.HTTPNotFound() |
|
582 | raise webob.exc.HTTPNotFound() | |
583 |
|
583 | |||
584 | # some globals counter for menu |
|
584 | # some globals counter for menu | |
585 | c.repository_followers = self.scm_model.get_followers(dbr) |
|
585 | c.repository_followers = self.scm_model.get_followers(dbr) | |
586 | c.repository_forks = self.scm_model.get_forks(dbr) |
|
586 | c.repository_forks = self.scm_model.get_forks(dbr) | |
587 | c.repository_pull_requests = self.scm_model.get_pull_requests(dbr) |
|
587 | c.repository_pull_requests = self.scm_model.get_pull_requests(dbr) | |
588 | c.repository_following = self.scm_model.is_following_repo( |
|
588 | c.repository_following = self.scm_model.is_following_repo( | |
589 | c.repo_name, request.authuser.user_id) |
|
589 | c.repo_name, request.authuser.user_id) | |
590 |
|
590 | |||
591 | @staticmethod |
|
591 | @staticmethod | |
592 | def _get_ref_rev(repo, ref_type, ref_name, returnempty=False): |
|
592 | def _get_ref_rev(repo, ref_type, ref_name, returnempty=False): | |
593 | """ |
|
593 | """ | |
594 | Safe way to get changeset. If error occurs show error. |
|
594 | Safe way to get changeset. If error occurs show error. | |
595 | """ |
|
595 | """ | |
596 | from kallithea.lib import helpers as h |
|
596 | from kallithea.lib import helpers as h | |
597 | try: |
|
597 | try: | |
598 | return repo.scm_instance.get_ref_revision(ref_type, ref_name) |
|
598 | return repo.scm_instance.get_ref_revision(ref_type, ref_name) | |
599 | except EmptyRepositoryError as e: |
|
599 | except EmptyRepositoryError as e: | |
600 | if returnempty: |
|
600 | if returnempty: | |
601 | return repo.scm_instance.EMPTY_CHANGESET |
|
601 | return repo.scm_instance.EMPTY_CHANGESET | |
602 | h.flash(_('There are no changesets yet'), category='error') |
|
602 | h.flash(_('There are no changesets yet'), category='error') | |
603 | raise webob.exc.HTTPNotFound() |
|
603 | raise webob.exc.HTTPNotFound() | |
604 | except ChangesetDoesNotExistError as e: |
|
604 | except ChangesetDoesNotExistError as e: | |
605 | h.flash(_('Changeset for %s %s not found in %s') % |
|
605 | h.flash(_('Changeset for %s %s not found in %s') % | |
606 | (ref_type, ref_name, repo.repo_name), |
|
606 | (ref_type, ref_name, repo.repo_name), | |
607 | category='error') |
|
607 | category='error') | |
608 | raise webob.exc.HTTPNotFound() |
|
608 | raise webob.exc.HTTPNotFound() | |
609 | except RepositoryError as e: |
|
609 | except RepositoryError as e: | |
610 | log.error(traceback.format_exc()) |
|
610 | log.error(traceback.format_exc()) | |
611 | h.flash(e, category='error') |
|
611 | h.flash(e, category='error') | |
612 | raise webob.exc.HTTPBadRequest() |
|
612 | raise webob.exc.HTTPBadRequest() | |
613 |
|
613 | |||
614 |
|
614 | |||
615 | @decorator.decorator |
|
615 | @decorator.decorator | |
616 | def jsonify(func, *args, **kwargs): |
|
616 | def jsonify(func, *args, **kwargs): | |
617 | """Action decorator that formats output for JSON |
|
617 | """Action decorator that formats output for JSON | |
618 |
|
618 | |||
619 | Given a function that will return content, this decorator will turn |
|
619 | Given a function that will return content, this decorator will turn | |
620 | the result into JSON, with a content-type of 'application/json' and |
|
620 | the result into JSON, with a content-type of 'application/json' and | |
621 | output it. |
|
621 | output it. | |
622 | """ |
|
622 | """ | |
623 | response.headers['Content-Type'] = 'application/json; charset=utf-8' |
|
623 | response.headers['Content-Type'] = 'application/json; charset=utf-8' | |
624 | data = func(*args, **kwargs) |
|
624 | data = func(*args, **kwargs) | |
625 | if isinstance(data, (list, tuple)): |
|
625 | if isinstance(data, (list, tuple)): | |
626 | # A JSON list response is syntactically valid JavaScript and can be |
|
626 | # A JSON list response is syntactically valid JavaScript and can be | |
627 | # loaded and executed as JavaScript by a malicious third-party site |
|
627 | # loaded and executed as JavaScript by a malicious third-party site | |
628 | # using <script>, which can lead to cross-site data leaks. |
|
628 | # using <script>, which can lead to cross-site data leaks. | |
629 | # JSON responses should therefore be scalars or objects (i.e. Python |
|
629 | # JSON responses should therefore be scalars or objects (i.e. Python | |
630 | # dicts), because a JSON object is a syntax error if intepreted as JS. |
|
630 | # dicts), because a JSON object is a syntax error if intepreted as JS. | |
631 | msg = "JSON responses with Array envelopes are susceptible to " \ |
|
631 | msg = "JSON responses with Array envelopes are susceptible to " \ | |
632 | "cross-site data leak attacks, see " \ |
|
632 | "cross-site data leak attacks, see " \ | |
633 | "https://web.archive.org/web/20120519231904/http://wiki.pylonshq.com/display/pylonsfaq/Warnings" |
|
633 | "https://web.archive.org/web/20120519231904/http://wiki.pylonshq.com/display/pylonsfaq/Warnings" | |
634 | warnings.warn(msg, Warning, 2) |
|
634 | warnings.warn(msg, Warning, 2) | |
635 | log.warning(msg) |
|
635 | log.warning(msg) | |
636 | log.debug("Returning JSON wrapped action output") |
|
636 | log.debug("Returning JSON wrapped action output") | |
637 | return ascii_bytes(ext_json.dumps(data)) |
|
637 | return ascii_bytes(ext_json.dumps(data)) | |
638 |
|
638 | |||
639 | @decorator.decorator |
|
639 | @decorator.decorator | |
640 | def IfSshEnabled(func, *args, **kwargs): |
|
640 | def IfSshEnabled(func, *args, **kwargs): | |
641 | """Decorator for functions that can only be called if SSH access is enabled. |
|
641 | """Decorator for functions that can only be called if SSH access is enabled. | |
642 |
|
642 | |||
643 | If SSH access is disabled in the configuration file, HTTPNotFound is raised. |
|
643 | If SSH access is disabled in the configuration file, HTTPNotFound is raised. | |
644 | """ |
|
644 | """ | |
645 | if not c.ssh_enabled: |
|
645 | if not c.ssh_enabled: | |
646 | from kallithea.lib import helpers as h |
|
646 | from kallithea.lib import helpers as h | |
647 | h.flash(_("SSH access is disabled."), category='warning') |
|
647 | h.flash(_("SSH access is disabled."), category='warning') | |
648 | raise webob.exc.HTTPNotFound() |
|
648 | raise webob.exc.HTTPNotFound() | |
649 | return func(*args, **kwargs) |
|
649 | return func(*args, **kwargs) |
@@ -1,413 +1,406 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.hooks |
|
15 | kallithea.lib.hooks | |
16 | ~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Hooks run by Kallithea |
|
18 | Hooks run by Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Aug 6, 2010 |
|
22 | :created_on: Aug 6, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import os |
|
28 | import os | |
29 | import sys |
|
29 | import sys | |
30 | import time |
|
30 | import time | |
31 |
|
31 | |||
32 | import mercurial.scmutil |
|
32 | import mercurial.scmutil | |
33 |
|
33 | |||
|
34 | import kallithea | |||
34 | from kallithea.lib import helpers as h |
|
35 | from kallithea.lib import helpers as h | |
35 | from kallithea.lib.exceptions import UserCreationError |
|
36 | from kallithea.lib.exceptions import UserCreationError | |
36 | from kallithea.lib.utils import action_logger, make_ui |
|
37 | from kallithea.lib.utils import action_logger, make_ui | |
37 | from kallithea.lib.utils2 import HookEnvironmentError, ascii_str, get_hook_environment, safe_bytes, safe_str |
|
38 | from kallithea.lib.utils2 import HookEnvironmentError, ascii_str, get_hook_environment, safe_bytes, safe_str | |
38 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
39 | from kallithea.lib.vcs.backends.base import EmptyChangeset | |
39 | from kallithea.model.db import Repository, User |
|
40 | from kallithea.model.db import Repository, User | |
40 |
|
41 | |||
41 |
|
42 | |||
42 | def _get_scm_size(alias, root_path): |
|
43 | def _get_scm_size(alias, root_path): | |
43 | if not alias.startswith('.'): |
|
44 | if not alias.startswith('.'): | |
44 | alias += '.' |
|
45 | alias += '.' | |
45 |
|
46 | |||
46 | size_scm, size_root = 0, 0 |
|
47 | size_scm, size_root = 0, 0 | |
47 | for path, dirs, files in os.walk(root_path): |
|
48 | for path, dirs, files in os.walk(root_path): | |
48 | if path.find(alias) != -1: |
|
49 | if path.find(alias) != -1: | |
49 | for f in files: |
|
50 | for f in files: | |
50 | try: |
|
51 | try: | |
51 | size_scm += os.path.getsize(os.path.join(path, f)) |
|
52 | size_scm += os.path.getsize(os.path.join(path, f)) | |
52 | except OSError: |
|
53 | except OSError: | |
53 | pass |
|
54 | pass | |
54 | else: |
|
55 | else: | |
55 | for f in files: |
|
56 | for f in files: | |
56 | try: |
|
57 | try: | |
57 | size_root += os.path.getsize(os.path.join(path, f)) |
|
58 | size_root += os.path.getsize(os.path.join(path, f)) | |
58 | except OSError: |
|
59 | except OSError: | |
59 | pass |
|
60 | pass | |
60 |
|
61 | |||
61 | size_scm_f = h.format_byte_size(size_scm) |
|
62 | size_scm_f = h.format_byte_size(size_scm) | |
62 | size_root_f = h.format_byte_size(size_root) |
|
63 | size_root_f = h.format_byte_size(size_root) | |
63 | size_total_f = h.format_byte_size(size_root + size_scm) |
|
64 | size_total_f = h.format_byte_size(size_root + size_scm) | |
64 |
|
65 | |||
65 | return size_scm_f, size_root_f, size_total_f |
|
66 | return size_scm_f, size_root_f, size_total_f | |
66 |
|
67 | |||
67 |
|
68 | |||
68 | def repo_size(ui, repo, hooktype=None, **kwargs): |
|
69 | def repo_size(ui, repo, hooktype=None, **kwargs): | |
69 | """Show size of Mercurial repository. |
|
70 | """Show size of Mercurial repository. | |
70 |
|
71 | |||
71 | Called as Mercurial hook changegroup.repo_size after push. |
|
72 | Called as Mercurial hook changegroup.repo_size after push. | |
72 | """ |
|
73 | """ | |
73 | size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', safe_str(repo.root)) |
|
74 | size_hg_f, size_root_f, size_total_f = _get_scm_size('.hg', safe_str(repo.root)) | |
74 |
|
75 | |||
75 | last_cs = repo[len(repo) - 1] |
|
76 | last_cs = repo[len(repo) - 1] | |
76 |
|
77 | |||
77 | msg = ('Repository size .hg: %s Checkout: %s Total: %s\n' |
|
78 | msg = ('Repository size .hg: %s Checkout: %s Total: %s\n' | |
78 | 'Last revision is now r%s:%s\n') % ( |
|
79 | 'Last revision is now r%s:%s\n') % ( | |
79 | size_hg_f, size_root_f, size_total_f, last_cs.rev(), ascii_str(last_cs.hex())[:12] |
|
80 | size_hg_f, size_root_f, size_total_f, last_cs.rev(), ascii_str(last_cs.hex())[:12] | |
80 | ) |
|
81 | ) | |
81 | ui.status(safe_bytes(msg)) |
|
82 | ui.status(safe_bytes(msg)) | |
82 |
|
83 | |||
83 |
|
84 | |||
84 | def log_pull_action(ui, repo, **kwargs): |
|
85 | def log_pull_action(ui, repo, **kwargs): | |
85 | """Logs user last pull action |
|
86 | """Logs user last pull action | |
86 |
|
87 | |||
87 | Called as Mercurial hook outgoing.pull_logger or from Kallithea before invoking Git. |
|
88 | Called as Mercurial hook outgoing.pull_logger or from Kallithea before invoking Git. | |
88 |
|
89 | |||
89 | Does *not* use the action from the hook environment but is always 'pull'. |
|
90 | Does *not* use the action from the hook environment but is always 'pull'. | |
90 | """ |
|
91 | """ | |
91 | ex = get_hook_environment() |
|
92 | ex = get_hook_environment() | |
92 |
|
93 | |||
93 | user = User.get_by_username(ex.username) |
|
94 | user = User.get_by_username(ex.username) | |
94 | action = 'pull' |
|
95 | action = 'pull' | |
95 | action_logger(user, action, ex.repository, ex.ip, commit=True) |
|
96 | action_logger(user, action, ex.repository, ex.ip, commit=True) | |
96 | # extension hook call |
|
97 | # extension hook call | |
97 | from kallithea import EXTENSIONS |
|
98 | callback = getattr(kallithea.EXTENSIONS, 'PULL_HOOK', None) | |
98 | callback = getattr(EXTENSIONS, 'PULL_HOOK', None) |
|
|||
99 | if callable(callback): |
|
99 | if callable(callback): | |
100 | kw = {} |
|
100 | kw = {} | |
101 | kw.update(ex) |
|
101 | kw.update(ex) | |
102 | callback(**kw) |
|
102 | callback(**kw) | |
103 |
|
103 | |||
104 |
|
104 | |||
105 | def log_push_action(ui, repo, node, node_last, **kwargs): |
|
105 | def log_push_action(ui, repo, node, node_last, **kwargs): | |
106 | """ |
|
106 | """ | |
107 | Register that changes have been added to the repo - log the action *and* invalidate caches. |
|
107 | Register that changes have been added to the repo - log the action *and* invalidate caches. | |
108 | Note: This hook is not only logging, but also the side effect invalidating |
|
108 | Note: This hook is not only logging, but also the side effect invalidating | |
109 | caches! The function should perhaps be renamed. |
|
109 | caches! The function should perhaps be renamed. | |
110 |
|
110 | |||
111 | Called as Mercurial hook changegroup.kallithea_log_push_action . |
|
111 | Called as Mercurial hook changegroup.kallithea_log_push_action . | |
112 |
|
112 | |||
113 | The pushed changesets is given by the revset 'node:node_last'. |
|
113 | The pushed changesets is given by the revset 'node:node_last'. | |
114 | """ |
|
114 | """ | |
115 | revs = [ascii_str(repo[r].hex()) for r in mercurial.scmutil.revrange(repo, [b'%s:%s' % (node, node_last)])] |
|
115 | revs = [ascii_str(repo[r].hex()) for r in mercurial.scmutil.revrange(repo, [b'%s:%s' % (node, node_last)])] | |
116 | process_pushed_raw_ids(revs) |
|
116 | process_pushed_raw_ids(revs) | |
117 |
|
117 | |||
118 |
|
118 | |||
119 | def process_pushed_raw_ids(revs): |
|
119 | def process_pushed_raw_ids(revs): | |
120 | """ |
|
120 | """ | |
121 | Register that changes have been added to the repo - log the action *and* invalidate caches. |
|
121 | Register that changes have been added to the repo - log the action *and* invalidate caches. | |
122 |
|
122 | |||
123 | Called from Mercurial changegroup.kallithea_log_push_action calling hook log_push_action, |
|
123 | Called from Mercurial changegroup.kallithea_log_push_action calling hook log_push_action, | |
124 | or from the Git post-receive hook calling handle_git_post_receive ... |
|
124 | or from the Git post-receive hook calling handle_git_post_receive ... | |
125 | or from scm _handle_push. |
|
125 | or from scm _handle_push. | |
126 | """ |
|
126 | """ | |
127 | ex = get_hook_environment() |
|
127 | ex = get_hook_environment() | |
128 |
|
128 | |||
129 | action = '%s:%s' % (ex.action, ','.join(revs)) |
|
129 | action = '%s:%s' % (ex.action, ','.join(revs)) | |
130 | action_logger(ex.username, action, ex.repository, ex.ip, commit=True) |
|
130 | action_logger(ex.username, action, ex.repository, ex.ip, commit=True) | |
131 |
|
131 | |||
132 | from kallithea.model.scm import ScmModel |
|
132 | from kallithea.model.scm import ScmModel | |
133 | ScmModel().mark_for_invalidation(ex.repository) |
|
133 | ScmModel().mark_for_invalidation(ex.repository) | |
134 |
|
134 | |||
135 | # extension hook call |
|
135 | # extension hook call | |
136 | from kallithea import EXTENSIONS |
|
136 | callback = getattr(kallithea.EXTENSIONS, 'PUSH_HOOK', None) | |
137 | callback = getattr(EXTENSIONS, 'PUSH_HOOK', None) |
|
|||
138 | if callable(callback): |
|
137 | if callable(callback): | |
139 | kw = {'pushed_revs': revs} |
|
138 | kw = {'pushed_revs': revs} | |
140 | kw.update(ex) |
|
139 | kw.update(ex) | |
141 | callback(**kw) |
|
140 | callback(**kw) | |
142 |
|
141 | |||
143 |
|
142 | |||
144 | def log_create_repository(repository_dict, created_by, **kwargs): |
|
143 | def log_create_repository(repository_dict, created_by, **kwargs): | |
145 | """ |
|
144 | """ | |
146 | Post create repository Hook. |
|
145 | Post create repository Hook. | |
147 |
|
146 | |||
148 | :param repository: dict dump of repository object |
|
147 | :param repository: dict dump of repository object | |
149 | :param created_by: username who created repository |
|
148 | :param created_by: username who created repository | |
150 |
|
149 | |||
151 | available keys of repository_dict: |
|
150 | available keys of repository_dict: | |
152 |
|
151 | |||
153 | 'repo_type', |
|
152 | 'repo_type', | |
154 | 'description', |
|
153 | 'description', | |
155 | 'private', |
|
154 | 'private', | |
156 | 'created_on', |
|
155 | 'created_on', | |
157 | 'enable_downloads', |
|
156 | 'enable_downloads', | |
158 | 'repo_id', |
|
157 | 'repo_id', | |
159 | 'owner_id', |
|
158 | 'owner_id', | |
160 | 'enable_statistics', |
|
159 | 'enable_statistics', | |
161 | 'clone_uri', |
|
160 | 'clone_uri', | |
162 | 'fork_id', |
|
161 | 'fork_id', | |
163 | 'group_id', |
|
162 | 'group_id', | |
164 | 'repo_name' |
|
163 | 'repo_name' | |
165 |
|
164 | |||
166 | """ |
|
165 | """ | |
167 | from kallithea import EXTENSIONS |
|
166 | callback = getattr(kallithea.EXTENSIONS, 'CREATE_REPO_HOOK', None) | |
168 | callback = getattr(EXTENSIONS, 'CREATE_REPO_HOOK', None) |
|
|||
169 | if callable(callback): |
|
167 | if callable(callback): | |
170 | kw = {} |
|
168 | kw = {} | |
171 | kw.update(repository_dict) |
|
169 | kw.update(repository_dict) | |
172 | kw.update({'created_by': created_by}) |
|
170 | kw.update({'created_by': created_by}) | |
173 | kw.update(kwargs) |
|
171 | kw.update(kwargs) | |
174 | callback(**kw) |
|
172 | callback(**kw) | |
175 |
|
173 | |||
176 |
|
174 | |||
177 | def check_allowed_create_user(user_dict, created_by, **kwargs): |
|
175 | def check_allowed_create_user(user_dict, created_by, **kwargs): | |
178 | # pre create hooks |
|
176 | # pre create hooks | |
179 | from kallithea import EXTENSIONS |
|
177 | callback = getattr(kallithea.EXTENSIONS, 'PRE_CREATE_USER_HOOK', None) | |
180 | callback = getattr(EXTENSIONS, 'PRE_CREATE_USER_HOOK', None) |
|
|||
181 | if callable(callback): |
|
178 | if callable(callback): | |
182 | allowed, reason = callback(created_by=created_by, **user_dict) |
|
179 | allowed, reason = callback(created_by=created_by, **user_dict) | |
183 | if not allowed: |
|
180 | if not allowed: | |
184 | raise UserCreationError(reason) |
|
181 | raise UserCreationError(reason) | |
185 |
|
182 | |||
186 |
|
183 | |||
187 | def log_create_user(user_dict, created_by, **kwargs): |
|
184 | def log_create_user(user_dict, created_by, **kwargs): | |
188 | """ |
|
185 | """ | |
189 | Post create user Hook. |
|
186 | Post create user Hook. | |
190 |
|
187 | |||
191 | :param user_dict: dict dump of user object |
|
188 | :param user_dict: dict dump of user object | |
192 |
|
189 | |||
193 | available keys for user_dict: |
|
190 | available keys for user_dict: | |
194 |
|
191 | |||
195 | 'username', |
|
192 | 'username', | |
196 | 'full_name_or_username', |
|
193 | 'full_name_or_username', | |
197 | 'full_contact', |
|
194 | 'full_contact', | |
198 | 'user_id', |
|
195 | 'user_id', | |
199 | 'name', |
|
196 | 'name', | |
200 | 'firstname', |
|
197 | 'firstname', | |
201 | 'short_contact', |
|
198 | 'short_contact', | |
202 | 'admin', |
|
199 | 'admin', | |
203 | 'lastname', |
|
200 | 'lastname', | |
204 | 'ip_addresses', |
|
201 | 'ip_addresses', | |
205 | 'ldap_dn', |
|
202 | 'ldap_dn', | |
206 | 'email', |
|
203 | 'email', | |
207 | 'api_key', |
|
204 | 'api_key', | |
208 | 'last_login', |
|
205 | 'last_login', | |
209 | 'full_name', |
|
206 | 'full_name', | |
210 | 'active', |
|
207 | 'active', | |
211 | 'password', |
|
208 | 'password', | |
212 | 'emails', |
|
209 | 'emails', | |
213 |
|
210 | |||
214 | """ |
|
211 | """ | |
215 | from kallithea import EXTENSIONS |
|
212 | callback = getattr(kallithea.EXTENSIONS, 'CREATE_USER_HOOK', None) | |
216 | callback = getattr(EXTENSIONS, 'CREATE_USER_HOOK', None) |
|
|||
217 | if callable(callback): |
|
213 | if callable(callback): | |
218 | callback(created_by=created_by, **user_dict) |
|
214 | callback(created_by=created_by, **user_dict) | |
219 |
|
215 | |||
220 |
|
216 | |||
221 | def log_create_pullrequest(pullrequest_dict, created_by, **kwargs): |
|
217 | def log_create_pullrequest(pullrequest_dict, created_by, **kwargs): | |
222 | """ |
|
218 | """ | |
223 | Post create pull request hook. |
|
219 | Post create pull request hook. | |
224 |
|
220 | |||
225 | :param pullrequest_dict: dict dump of pull request object |
|
221 | :param pullrequest_dict: dict dump of pull request object | |
226 | """ |
|
222 | """ | |
227 | from kallithea import EXTENSIONS |
|
223 | callback = getattr(kallithea.EXTENSIONS, 'CREATE_PULLREQUEST_HOOK', None) | |
228 | callback = getattr(EXTENSIONS, 'CREATE_PULLREQUEST_HOOK', None) |
|
|||
229 | if callable(callback): |
|
224 | if callable(callback): | |
230 | return callback(created_by=created_by, **pullrequest_dict) |
|
225 | return callback(created_by=created_by, **pullrequest_dict) | |
231 |
|
226 | |||
232 | return 0 |
|
227 | return 0 | |
233 |
|
228 | |||
234 | def log_delete_repository(repository_dict, deleted_by, **kwargs): |
|
229 | def log_delete_repository(repository_dict, deleted_by, **kwargs): | |
235 | """ |
|
230 | """ | |
236 | Post delete repository Hook. |
|
231 | Post delete repository Hook. | |
237 |
|
232 | |||
238 | :param repository: dict dump of repository object |
|
233 | :param repository: dict dump of repository object | |
239 | :param deleted_by: username who deleted the repository |
|
234 | :param deleted_by: username who deleted the repository | |
240 |
|
235 | |||
241 | available keys of repository_dict: |
|
236 | available keys of repository_dict: | |
242 |
|
237 | |||
243 | 'repo_type', |
|
238 | 'repo_type', | |
244 | 'description', |
|
239 | 'description', | |
245 | 'private', |
|
240 | 'private', | |
246 | 'created_on', |
|
241 | 'created_on', | |
247 | 'enable_downloads', |
|
242 | 'enable_downloads', | |
248 | 'repo_id', |
|
243 | 'repo_id', | |
249 | 'owner_id', |
|
244 | 'owner_id', | |
250 | 'enable_statistics', |
|
245 | 'enable_statistics', | |
251 | 'clone_uri', |
|
246 | 'clone_uri', | |
252 | 'fork_id', |
|
247 | 'fork_id', | |
253 | 'group_id', |
|
248 | 'group_id', | |
254 | 'repo_name' |
|
249 | 'repo_name' | |
255 |
|
250 | |||
256 | """ |
|
251 | """ | |
257 | from kallithea import EXTENSIONS |
|
252 | callback = getattr(kallithea.EXTENSIONS, 'DELETE_REPO_HOOK', None) | |
258 | callback = getattr(EXTENSIONS, 'DELETE_REPO_HOOK', None) |
|
|||
259 | if callable(callback): |
|
253 | if callable(callback): | |
260 | kw = {} |
|
254 | kw = {} | |
261 | kw.update(repository_dict) |
|
255 | kw.update(repository_dict) | |
262 | kw.update({'deleted_by': deleted_by, |
|
256 | kw.update({'deleted_by': deleted_by, | |
263 | 'deleted_on': time.time()}) |
|
257 | 'deleted_on': time.time()}) | |
264 | kw.update(kwargs) |
|
258 | kw.update(kwargs) | |
265 | callback(**kw) |
|
259 | callback(**kw) | |
266 |
|
260 | |||
267 |
|
261 | |||
268 | def log_delete_user(user_dict, deleted_by, **kwargs): |
|
262 | def log_delete_user(user_dict, deleted_by, **kwargs): | |
269 | """ |
|
263 | """ | |
270 | Post delete user Hook. |
|
264 | Post delete user Hook. | |
271 |
|
265 | |||
272 | :param user_dict: dict dump of user object |
|
266 | :param user_dict: dict dump of user object | |
273 |
|
267 | |||
274 | available keys for user_dict: |
|
268 | available keys for user_dict: | |
275 |
|
269 | |||
276 | 'username', |
|
270 | 'username', | |
277 | 'full_name_or_username', |
|
271 | 'full_name_or_username', | |
278 | 'full_contact', |
|
272 | 'full_contact', | |
279 | 'user_id', |
|
273 | 'user_id', | |
280 | 'name', |
|
274 | 'name', | |
281 | 'firstname', |
|
275 | 'firstname', | |
282 | 'short_contact', |
|
276 | 'short_contact', | |
283 | 'admin', |
|
277 | 'admin', | |
284 | 'lastname', |
|
278 | 'lastname', | |
285 | 'ip_addresses', |
|
279 | 'ip_addresses', | |
286 | 'ldap_dn', |
|
280 | 'ldap_dn', | |
287 | 'email', |
|
281 | 'email', | |
288 | 'api_key', |
|
282 | 'api_key', | |
289 | 'last_login', |
|
283 | 'last_login', | |
290 | 'full_name', |
|
284 | 'full_name', | |
291 | 'active', |
|
285 | 'active', | |
292 | 'password', |
|
286 | 'password', | |
293 | 'emails', |
|
287 | 'emails', | |
294 |
|
288 | |||
295 | """ |
|
289 | """ | |
296 | from kallithea import EXTENSIONS |
|
290 | callback = getattr(kallithea.EXTENSIONS, 'DELETE_USER_HOOK', None) | |
297 | callback = getattr(EXTENSIONS, 'DELETE_USER_HOOK', None) |
|
|||
298 | if callable(callback): |
|
291 | if callable(callback): | |
299 | callback(deleted_by=deleted_by, **user_dict) |
|
292 | callback(deleted_by=deleted_by, **user_dict) | |
300 |
|
293 | |||
301 |
|
294 | |||
302 | def _hook_environment(repo_path): |
|
295 | def _hook_environment(repo_path): | |
303 | """ |
|
296 | """ | |
304 | Create a light-weight environment for stand-alone scripts and return an UI and the |
|
297 | Create a light-weight environment for stand-alone scripts and return an UI and the | |
305 | db repository. |
|
298 | db repository. | |
306 |
|
299 | |||
307 | Git hooks are executed as subprocess of Git while Kallithea is waiting, and |
|
300 | Git hooks are executed as subprocess of Git while Kallithea is waiting, and | |
308 | they thus need enough info to be able to create an app environment and |
|
301 | they thus need enough info to be able to create an app environment and | |
309 | connect to the database. |
|
302 | connect to the database. | |
310 | """ |
|
303 | """ | |
311 | import paste.deploy |
|
304 | import paste.deploy | |
312 |
|
305 | |||
313 | import kallithea.config.application |
|
306 | import kallithea.config.application | |
314 |
|
307 | |||
315 | extras = get_hook_environment() |
|
308 | extras = get_hook_environment() | |
316 |
|
309 | |||
317 | path_to_ini_file = extras['config'] |
|
310 | path_to_ini_file = extras['config'] | |
318 | config = paste.deploy.appconfig('config:' + path_to_ini_file) |
|
311 | config = paste.deploy.appconfig('config:' + path_to_ini_file) | |
319 | #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging |
|
312 | #logging.config.fileConfig(ini_file_path) # Note: we are in a different process - don't use configured logging | |
320 | kallithea.config.application.make_app(config.global_conf, **config.local_conf) |
|
313 | kallithea.config.application.make_app(config.global_conf, **config.local_conf) | |
321 |
|
314 | |||
322 | # fix if it's not a bare repo |
|
315 | # fix if it's not a bare repo | |
323 | if repo_path.endswith(os.sep + '.git'): |
|
316 | if repo_path.endswith(os.sep + '.git'): | |
324 | repo_path = repo_path[:-5] |
|
317 | repo_path = repo_path[:-5] | |
325 |
|
318 | |||
326 | repo = Repository.get_by_full_path(repo_path) |
|
319 | repo = Repository.get_by_full_path(repo_path) | |
327 | if not repo: |
|
320 | if not repo: | |
328 | raise OSError('Repository %s not found in database' % repo_path) |
|
321 | raise OSError('Repository %s not found in database' % repo_path) | |
329 |
|
322 | |||
330 | baseui = make_ui() |
|
323 | baseui = make_ui() | |
331 | return baseui, repo |
|
324 | return baseui, repo | |
332 |
|
325 | |||
333 |
|
326 | |||
334 | def handle_git_pre_receive(repo_path, git_stdin_lines): |
|
327 | def handle_git_pre_receive(repo_path, git_stdin_lines): | |
335 | """Called from Git pre-receive hook. |
|
328 | """Called from Git pre-receive hook. | |
336 | The returned value is used as hook exit code and must be 0. |
|
329 | The returned value is used as hook exit code and must be 0. | |
337 | """ |
|
330 | """ | |
338 | # Currently unused. TODO: remove? |
|
331 | # Currently unused. TODO: remove? | |
339 | return 0 |
|
332 | return 0 | |
340 |
|
333 | |||
341 |
|
334 | |||
342 | def handle_git_post_receive(repo_path, git_stdin_lines): |
|
335 | def handle_git_post_receive(repo_path, git_stdin_lines): | |
343 | """Called from Git post-receive hook. |
|
336 | """Called from Git post-receive hook. | |
344 | The returned value is used as hook exit code and must be 0. |
|
337 | The returned value is used as hook exit code and must be 0. | |
345 | """ |
|
338 | """ | |
346 | try: |
|
339 | try: | |
347 | baseui, repo = _hook_environment(repo_path) |
|
340 | baseui, repo = _hook_environment(repo_path) | |
348 | except HookEnvironmentError as e: |
|
341 | except HookEnvironmentError as e: | |
349 | sys.stderr.write("Skipping Kallithea Git post-recieve hook %r.\nGit was apparently not invoked by Kallithea: %s\n" % (sys.argv[0], e)) |
|
342 | sys.stderr.write("Skipping Kallithea Git post-recieve hook %r.\nGit was apparently not invoked by Kallithea: %s\n" % (sys.argv[0], e)) | |
350 | return 0 |
|
343 | return 0 | |
351 |
|
344 | |||
352 | # the post push hook should never use the cached instance |
|
345 | # the post push hook should never use the cached instance | |
353 | scm_repo = repo.scm_instance_no_cache() |
|
346 | scm_repo = repo.scm_instance_no_cache() | |
354 |
|
347 | |||
355 | rev_data = [] |
|
348 | rev_data = [] | |
356 | for l in git_stdin_lines: |
|
349 | for l in git_stdin_lines: | |
357 | old_rev, new_rev, ref = l.strip().split(' ') |
|
350 | old_rev, new_rev, ref = l.strip().split(' ') | |
358 | _ref_data = ref.split('/') |
|
351 | _ref_data = ref.split('/') | |
359 | if _ref_data[1] in ['tags', 'heads']: |
|
352 | if _ref_data[1] in ['tags', 'heads']: | |
360 | rev_data.append({'old_rev': old_rev, |
|
353 | rev_data.append({'old_rev': old_rev, | |
361 | 'new_rev': new_rev, |
|
354 | 'new_rev': new_rev, | |
362 | 'ref': ref, |
|
355 | 'ref': ref, | |
363 | 'type': _ref_data[1], |
|
356 | 'type': _ref_data[1], | |
364 | 'name': '/'.join(_ref_data[2:])}) |
|
357 | 'name': '/'.join(_ref_data[2:])}) | |
365 |
|
358 | |||
366 | git_revs = [] |
|
359 | git_revs = [] | |
367 | for push_ref in rev_data: |
|
360 | for push_ref in rev_data: | |
368 | _type = push_ref['type'] |
|
361 | _type = push_ref['type'] | |
369 | if _type == 'heads': |
|
362 | if _type == 'heads': | |
370 | if push_ref['old_rev'] == EmptyChangeset().raw_id: |
|
363 | if push_ref['old_rev'] == EmptyChangeset().raw_id: | |
371 | # update the symbolic ref if we push new repo |
|
364 | # update the symbolic ref if we push new repo | |
372 | if scm_repo.is_empty(): |
|
365 | if scm_repo.is_empty(): | |
373 | scm_repo._repo.refs.set_symbolic_ref( |
|
366 | scm_repo._repo.refs.set_symbolic_ref( | |
374 | b'HEAD', |
|
367 | b'HEAD', | |
375 | b'refs/heads/%s' % safe_bytes(push_ref['name'])) |
|
368 | b'refs/heads/%s' % safe_bytes(push_ref['name'])) | |
376 |
|
369 | |||
377 | # build exclude list without the ref |
|
370 | # build exclude list without the ref | |
378 | cmd = ['for-each-ref', '--format=%(refname)', 'refs/heads/*'] |
|
371 | cmd = ['for-each-ref', '--format=%(refname)', 'refs/heads/*'] | |
379 | stdout = scm_repo.run_git_command(cmd) |
|
372 | stdout = scm_repo.run_git_command(cmd) | |
380 | ref = push_ref['ref'] |
|
373 | ref = push_ref['ref'] | |
381 | heads = [head for head in stdout.splitlines() if head != ref] |
|
374 | heads = [head for head in stdout.splitlines() if head != ref] | |
382 | # now list the git revs while excluding from the list |
|
375 | # now list the git revs while excluding from the list | |
383 | cmd = ['log', push_ref['new_rev'], '--reverse', '--pretty=format:%H'] |
|
376 | cmd = ['log', push_ref['new_rev'], '--reverse', '--pretty=format:%H'] | |
384 | cmd.append('--not') |
|
377 | cmd.append('--not') | |
385 | cmd.extend(heads) # empty list is ok |
|
378 | cmd.extend(heads) # empty list is ok | |
386 | stdout = scm_repo.run_git_command(cmd) |
|
379 | stdout = scm_repo.run_git_command(cmd) | |
387 | git_revs += stdout.splitlines() |
|
380 | git_revs += stdout.splitlines() | |
388 |
|
381 | |||
389 | elif push_ref['new_rev'] == EmptyChangeset().raw_id: |
|
382 | elif push_ref['new_rev'] == EmptyChangeset().raw_id: | |
390 | # delete branch case |
|
383 | # delete branch case | |
391 | git_revs += ['delete_branch=>%s' % push_ref['name']] |
|
384 | git_revs += ['delete_branch=>%s' % push_ref['name']] | |
392 | else: |
|
385 | else: | |
393 | cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref, |
|
386 | cmd = ['log', '%(old_rev)s..%(new_rev)s' % push_ref, | |
394 | '--reverse', '--pretty=format:%H'] |
|
387 | '--reverse', '--pretty=format:%H'] | |
395 | stdout = scm_repo.run_git_command(cmd) |
|
388 | stdout = scm_repo.run_git_command(cmd) | |
396 | git_revs += stdout.splitlines() |
|
389 | git_revs += stdout.splitlines() | |
397 |
|
390 | |||
398 | elif _type == 'tags': |
|
391 | elif _type == 'tags': | |
399 | git_revs += ['tag=>%s' % push_ref['name']] |
|
392 | git_revs += ['tag=>%s' % push_ref['name']] | |
400 |
|
393 | |||
401 | process_pushed_raw_ids(git_revs) |
|
394 | process_pushed_raw_ids(git_revs) | |
402 |
|
395 | |||
403 | return 0 |
|
396 | return 0 | |
404 |
|
397 | |||
405 |
|
398 | |||
406 | # Almost exactly like Mercurial contrib/hg-ssh: |
|
399 | # Almost exactly like Mercurial contrib/hg-ssh: | |
407 | def rejectpush(ui, **kwargs): |
|
400 | def rejectpush(ui, **kwargs): | |
408 | """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos. |
|
401 | """Mercurial hook to be installed as pretxnopen and prepushkey for read-only repos. | |
409 | Return value 1 will make the hook fail and reject the push. |
|
402 | Return value 1 will make the hook fail and reject the push. | |
410 | """ |
|
403 | """ | |
411 | ex = get_hook_environment() |
|
404 | ex = get_hook_environment() | |
412 | ui.warn(safe_bytes("Push access to %r denied\n" % ex.repository)) |
|
405 | ui.warn(safe_bytes("Push access to %r denied\n" % ex.repository)) | |
413 | return 1 |
|
406 | return 1 |
@@ -1,519 +1,519 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.utils2 |
|
15 | kallithea.lib.utils2 | |
16 | ~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Some simple helper functions. |
|
18 | Some simple helper functions. | |
19 | Note: all these functions should be independent of Kallithea classes, i.e. |
|
19 | Note: all these functions should be independent of Kallithea classes, i.e. | |
20 | models, controllers, etc. to prevent import cycles. |
|
20 | models, controllers, etc. to prevent import cycles. | |
21 |
|
21 | |||
22 | This file was forked by the Kallithea project in July 2014. |
|
22 | This file was forked by the Kallithea project in July 2014. | |
23 | Original author and date, and relevant copyright and licensing information is below: |
|
23 | Original author and date, and relevant copyright and licensing information is below: | |
24 | :created_on: Jan 5, 2011 |
|
24 | :created_on: Jan 5, 2011 | |
25 | :author: marcink |
|
25 | :author: marcink | |
26 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
26 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
27 | :license: GPLv3, see LICENSE.md for more details. |
|
27 | :license: GPLv3, see LICENSE.md for more details. | |
28 | """ |
|
28 | """ | |
29 |
|
29 | |||
30 | import binascii |
|
30 | import binascii | |
31 | import datetime |
|
31 | import datetime | |
32 | import json |
|
32 | import json | |
33 | import os |
|
33 | import os | |
34 | import re |
|
34 | import re | |
35 | import time |
|
35 | import time | |
36 | import urllib.parse |
|
36 | import urllib.parse | |
37 |
|
37 | |||
38 | import urlobject |
|
38 | import urlobject | |
39 | from tg.i18n import ugettext as _ |
|
39 | from tg.i18n import ugettext as _ | |
40 | from tg.i18n import ungettext |
|
40 | from tg.i18n import ungettext | |
41 | from tg.support.converters import asbool, aslist |
|
41 | from tg.support.converters import asbool, aslist | |
42 | from webhelpers2.text import collapse, remove_formatting, strip_tags |
|
42 | from webhelpers2.text import collapse, remove_formatting, strip_tags | |
43 |
|
43 | |||
|
44 | import kallithea | |||
44 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export |
|
45 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export | |
45 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
46 | from kallithea.lib.vcs.utils.lazy import LazyProperty | |
46 |
|
47 | |||
47 |
|
48 | |||
48 | try: |
|
49 | try: | |
49 | import pwd |
|
50 | import pwd | |
50 | except ImportError: |
|
51 | except ImportError: | |
51 | pass |
|
52 | pass | |
52 |
|
53 | |||
53 |
|
54 | |||
54 | # mute pyflakes "imported but unused" |
|
55 | # mute pyflakes "imported but unused" | |
55 | assert asbool |
|
56 | assert asbool | |
56 | assert aslist |
|
57 | assert aslist | |
57 | assert ascii_bytes |
|
58 | assert ascii_bytes | |
58 | assert ascii_str |
|
59 | assert ascii_str | |
59 | assert safe_bytes |
|
60 | assert safe_bytes | |
60 | assert safe_str |
|
61 | assert safe_str | |
61 | assert LazyProperty |
|
62 | assert LazyProperty | |
62 |
|
63 | |||
63 |
|
64 | |||
64 | def convert_line_endings(line, mode): |
|
65 | def convert_line_endings(line, mode): | |
65 | """ |
|
66 | """ | |
66 | Converts a given line "line end" according to given mode |
|
67 | Converts a given line "line end" according to given mode | |
67 |
|
68 | |||
68 | Available modes are:: |
|
69 | Available modes are:: | |
69 | 0 - Unix |
|
70 | 0 - Unix | |
70 | 1 - Mac |
|
71 | 1 - Mac | |
71 | 2 - DOS |
|
72 | 2 - DOS | |
72 |
|
73 | |||
73 | :param line: given line to convert |
|
74 | :param line: given line to convert | |
74 | :param mode: mode to convert to |
|
75 | :param mode: mode to convert to | |
75 | :rtype: str |
|
76 | :rtype: str | |
76 | :return: converted line according to mode |
|
77 | :return: converted line according to mode | |
77 | """ |
|
78 | """ | |
78 | if mode == 0: |
|
79 | if mode == 0: | |
79 | line = line.replace('\r\n', '\n') |
|
80 | line = line.replace('\r\n', '\n') | |
80 | line = line.replace('\r', '\n') |
|
81 | line = line.replace('\r', '\n') | |
81 | elif mode == 1: |
|
82 | elif mode == 1: | |
82 | line = line.replace('\r\n', '\r') |
|
83 | line = line.replace('\r\n', '\r') | |
83 | line = line.replace('\n', '\r') |
|
84 | line = line.replace('\n', '\r') | |
84 | elif mode == 2: |
|
85 | elif mode == 2: | |
85 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) |
|
86 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) | |
86 | return line |
|
87 | return line | |
87 |
|
88 | |||
88 |
|
89 | |||
89 | def detect_mode(line, default): |
|
90 | def detect_mode(line, default): | |
90 | """ |
|
91 | """ | |
91 | Detects line break for given line, if line break couldn't be found |
|
92 | Detects line break for given line, if line break couldn't be found | |
92 | given default value is returned |
|
93 | given default value is returned | |
93 |
|
94 | |||
94 | :param line: str line |
|
95 | :param line: str line | |
95 | :param default: default |
|
96 | :param default: default | |
96 | :rtype: int |
|
97 | :rtype: int | |
97 | :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS |
|
98 | :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS | |
98 | """ |
|
99 | """ | |
99 | if line.endswith('\r\n'): |
|
100 | if line.endswith('\r\n'): | |
100 | return 2 |
|
101 | return 2 | |
101 | elif line.endswith('\n'): |
|
102 | elif line.endswith('\n'): | |
102 | return 0 |
|
103 | return 0 | |
103 | elif line.endswith('\r'): |
|
104 | elif line.endswith('\r'): | |
104 | return 1 |
|
105 | return 1 | |
105 | else: |
|
106 | else: | |
106 | return default |
|
107 | return default | |
107 |
|
108 | |||
108 |
|
109 | |||
109 | def generate_api_key(): |
|
110 | def generate_api_key(): | |
110 | """ |
|
111 | """ | |
111 | Generates a random (presumably unique) API key. |
|
112 | Generates a random (presumably unique) API key. | |
112 |
|
113 | |||
113 | This value is used in URLs and "Bearer" HTTP Authorization headers, |
|
114 | This value is used in URLs and "Bearer" HTTP Authorization headers, | |
114 | which in practice means it should only contain URL-safe characters |
|
115 | which in practice means it should only contain URL-safe characters | |
115 | (RFC 3986): |
|
116 | (RFC 3986): | |
116 |
|
117 | |||
117 | unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" |
|
118 | unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" | |
118 | """ |
|
119 | """ | |
119 | # Hexadecimal certainly qualifies as URL-safe. |
|
120 | # Hexadecimal certainly qualifies as URL-safe. | |
120 | return ascii_str(binascii.hexlify(os.urandom(20))) |
|
121 | return ascii_str(binascii.hexlify(os.urandom(20))) | |
121 |
|
122 | |||
122 |
|
123 | |||
123 | def safe_int(val, default=None): |
|
124 | def safe_int(val, default=None): | |
124 | """ |
|
125 | """ | |
125 | Returns int() of val if val is not convertable to int use default |
|
126 | Returns int() of val if val is not convertable to int use default | |
126 | instead |
|
127 | instead | |
127 |
|
128 | |||
128 | :param val: |
|
129 | :param val: | |
129 | :param default: |
|
130 | :param default: | |
130 | """ |
|
131 | """ | |
131 | try: |
|
132 | try: | |
132 | val = int(val) |
|
133 | val = int(val) | |
133 | except (ValueError, TypeError): |
|
134 | except (ValueError, TypeError): | |
134 | val = default |
|
135 | val = default | |
135 | return val |
|
136 | return val | |
136 |
|
137 | |||
137 |
|
138 | |||
138 | def remove_suffix(s, suffix): |
|
139 | def remove_suffix(s, suffix): | |
139 | if s.endswith(suffix): |
|
140 | if s.endswith(suffix): | |
140 | s = s[:-1 * len(suffix)] |
|
141 | s = s[:-1 * len(suffix)] | |
141 | return s |
|
142 | return s | |
142 |
|
143 | |||
143 |
|
144 | |||
144 | def remove_prefix(s, prefix): |
|
145 | def remove_prefix(s, prefix): | |
145 | if s.startswith(prefix): |
|
146 | if s.startswith(prefix): | |
146 | s = s[len(prefix):] |
|
147 | s = s[len(prefix):] | |
147 | return s |
|
148 | return s | |
148 |
|
149 | |||
149 |
|
150 | |||
150 | def age(prevdate, show_short_version=False, now=None): |
|
151 | def age(prevdate, show_short_version=False, now=None): | |
151 | """ |
|
152 | """ | |
152 | turns a datetime into an age string. |
|
153 | turns a datetime into an age string. | |
153 | If show_short_version is True, then it will generate a not so accurate but shorter string, |
|
154 | If show_short_version is True, then it will generate a not so accurate but shorter string, | |
154 | example: 2days ago, instead of 2 days and 23 hours ago. |
|
155 | example: 2days ago, instead of 2 days and 23 hours ago. | |
155 |
|
156 | |||
156 | :param prevdate: datetime object |
|
157 | :param prevdate: datetime object | |
157 | :param show_short_version: if it should approximate the date and return a shorter string |
|
158 | :param show_short_version: if it should approximate the date and return a shorter string | |
158 | :rtype: str |
|
159 | :rtype: str | |
159 | :returns: str words describing age |
|
160 | :returns: str words describing age | |
160 | """ |
|
161 | """ | |
161 | now = now or datetime.datetime.now() |
|
162 | now = now or datetime.datetime.now() | |
162 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] |
|
163 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] | |
163 | deltas = {} |
|
164 | deltas = {} | |
164 | future = False |
|
165 | future = False | |
165 |
|
166 | |||
166 | if prevdate > now: |
|
167 | if prevdate > now: | |
167 | now, prevdate = prevdate, now |
|
168 | now, prevdate = prevdate, now | |
168 | future = True |
|
169 | future = True | |
169 | if future: |
|
170 | if future: | |
170 | prevdate = prevdate.replace(microsecond=0) |
|
171 | prevdate = prevdate.replace(microsecond=0) | |
171 | # Get date parts deltas |
|
172 | # Get date parts deltas | |
172 | from dateutil import relativedelta |
|
173 | from dateutil import relativedelta | |
173 | for part in order: |
|
174 | for part in order: | |
174 | d = relativedelta.relativedelta(now, prevdate) |
|
175 | d = relativedelta.relativedelta(now, prevdate) | |
175 | deltas[part] = getattr(d, part + 's') |
|
176 | deltas[part] = getattr(d, part + 's') | |
176 |
|
177 | |||
177 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, |
|
178 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, | |
178 | # not 1 hour, -59 minutes and -59 seconds) |
|
179 | # not 1 hour, -59 minutes and -59 seconds) | |
179 | for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours |
|
180 | for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours | |
180 | part = order[num] |
|
181 | part = order[num] | |
181 | carry_part = order[num - 1] |
|
182 | carry_part = order[num - 1] | |
182 |
|
183 | |||
183 | if deltas[part] < 0: |
|
184 | if deltas[part] < 0: | |
184 | deltas[part] += length |
|
185 | deltas[part] += length | |
185 | deltas[carry_part] -= 1 |
|
186 | deltas[carry_part] -= 1 | |
186 |
|
187 | |||
187 | # Same thing for days except that the increment depends on the (variable) |
|
188 | # Same thing for days except that the increment depends on the (variable) | |
188 | # number of days in the month |
|
189 | # number of days in the month | |
189 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] |
|
190 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] | |
190 | if deltas['day'] < 0: |
|
191 | if deltas['day'] < 0: | |
191 | if prevdate.month == 2 and (prevdate.year % 4 == 0 and |
|
192 | if prevdate.month == 2 and (prevdate.year % 4 == 0 and | |
192 | (prevdate.year % 100 != 0 or prevdate.year % 400 == 0) |
|
193 | (prevdate.year % 100 != 0 or prevdate.year % 400 == 0) | |
193 | ): |
|
194 | ): | |
194 | deltas['day'] += 29 |
|
195 | deltas['day'] += 29 | |
195 | else: |
|
196 | else: | |
196 | deltas['day'] += month_lengths[prevdate.month - 1] |
|
197 | deltas['day'] += month_lengths[prevdate.month - 1] | |
197 |
|
198 | |||
198 | deltas['month'] -= 1 |
|
199 | deltas['month'] -= 1 | |
199 |
|
200 | |||
200 | if deltas['month'] < 0: |
|
201 | if deltas['month'] < 0: | |
201 | deltas['month'] += 12 |
|
202 | deltas['month'] += 12 | |
202 | deltas['year'] -= 1 |
|
203 | deltas['year'] -= 1 | |
203 |
|
204 | |||
204 | # In short version, we want nicer handling of ages of more than a year |
|
205 | # In short version, we want nicer handling of ages of more than a year | |
205 | if show_short_version: |
|
206 | if show_short_version: | |
206 | if deltas['year'] == 1: |
|
207 | if deltas['year'] == 1: | |
207 | # ages between 1 and 2 years: show as months |
|
208 | # ages between 1 and 2 years: show as months | |
208 | deltas['month'] += 12 |
|
209 | deltas['month'] += 12 | |
209 | deltas['year'] = 0 |
|
210 | deltas['year'] = 0 | |
210 | if deltas['year'] >= 2: |
|
211 | if deltas['year'] >= 2: | |
211 | # ages 2+ years: round |
|
212 | # ages 2+ years: round | |
212 | if deltas['month'] > 6: |
|
213 | if deltas['month'] > 6: | |
213 | deltas['year'] += 1 |
|
214 | deltas['year'] += 1 | |
214 | deltas['month'] = 0 |
|
215 | deltas['month'] = 0 | |
215 |
|
216 | |||
216 | # Format the result |
|
217 | # Format the result | |
217 | fmt_funcs = { |
|
218 | fmt_funcs = { | |
218 | 'year': lambda d: ungettext('%d year', '%d years', d) % d, |
|
219 | 'year': lambda d: ungettext('%d year', '%d years', d) % d, | |
219 | 'month': lambda d: ungettext('%d month', '%d months', d) % d, |
|
220 | 'month': lambda d: ungettext('%d month', '%d months', d) % d, | |
220 | 'day': lambda d: ungettext('%d day', '%d days', d) % d, |
|
221 | 'day': lambda d: ungettext('%d day', '%d days', d) % d, | |
221 | 'hour': lambda d: ungettext('%d hour', '%d hours', d) % d, |
|
222 | 'hour': lambda d: ungettext('%d hour', '%d hours', d) % d, | |
222 | 'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d, |
|
223 | 'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d, | |
223 | 'second': lambda d: ungettext('%d second', '%d seconds', d) % d, |
|
224 | 'second': lambda d: ungettext('%d second', '%d seconds', d) % d, | |
224 | } |
|
225 | } | |
225 |
|
226 | |||
226 | for i, part in enumerate(order): |
|
227 | for i, part in enumerate(order): | |
227 | value = deltas[part] |
|
228 | value = deltas[part] | |
228 | if value == 0: |
|
229 | if value == 0: | |
229 | continue |
|
230 | continue | |
230 |
|
231 | |||
231 | if i < 5: |
|
232 | if i < 5: | |
232 | sub_part = order[i + 1] |
|
233 | sub_part = order[i + 1] | |
233 | sub_value = deltas[sub_part] |
|
234 | sub_value = deltas[sub_part] | |
234 | else: |
|
235 | else: | |
235 | sub_value = 0 |
|
236 | sub_value = 0 | |
236 |
|
237 | |||
237 | if sub_value == 0 or show_short_version: |
|
238 | if sub_value == 0 or show_short_version: | |
238 | if future: |
|
239 | if future: | |
239 | return _('in %s') % fmt_funcs[part](value) |
|
240 | return _('in %s') % fmt_funcs[part](value) | |
240 | else: |
|
241 | else: | |
241 | return _('%s ago') % fmt_funcs[part](value) |
|
242 | return _('%s ago') % fmt_funcs[part](value) | |
242 | if future: |
|
243 | if future: | |
243 | return _('in %s and %s') % (fmt_funcs[part](value), |
|
244 | return _('in %s and %s') % (fmt_funcs[part](value), | |
244 | fmt_funcs[sub_part](sub_value)) |
|
245 | fmt_funcs[sub_part](sub_value)) | |
245 | else: |
|
246 | else: | |
246 | return _('%s and %s ago') % (fmt_funcs[part](value), |
|
247 | return _('%s and %s ago') % (fmt_funcs[part](value), | |
247 | fmt_funcs[sub_part](sub_value)) |
|
248 | fmt_funcs[sub_part](sub_value)) | |
248 |
|
249 | |||
249 | return _('just now') |
|
250 | return _('just now') | |
250 |
|
251 | |||
251 |
|
252 | |||
252 | def uri_filter(uri): |
|
253 | def uri_filter(uri): | |
253 | """ |
|
254 | """ | |
254 | Removes user:password from given url string |
|
255 | Removes user:password from given url string | |
255 |
|
256 | |||
256 | :param uri: |
|
257 | :param uri: | |
257 | :rtype: str |
|
258 | :rtype: str | |
258 | :returns: filtered list of strings |
|
259 | :returns: filtered list of strings | |
259 | """ |
|
260 | """ | |
260 | if not uri: |
|
261 | if not uri: | |
261 | return [] |
|
262 | return [] | |
262 |
|
263 | |||
263 | proto = '' |
|
264 | proto = '' | |
264 |
|
265 | |||
265 | for pat in ('https://', 'http://', 'git://'): |
|
266 | for pat in ('https://', 'http://', 'git://'): | |
266 | if uri.startswith(pat): |
|
267 | if uri.startswith(pat): | |
267 | uri = uri[len(pat):] |
|
268 | uri = uri[len(pat):] | |
268 | proto = pat |
|
269 | proto = pat | |
269 | break |
|
270 | break | |
270 |
|
271 | |||
271 | # remove passwords and username |
|
272 | # remove passwords and username | |
272 | uri = uri[uri.find('@') + 1:] |
|
273 | uri = uri[uri.find('@') + 1:] | |
273 |
|
274 | |||
274 | # get the port |
|
275 | # get the port | |
275 | cred_pos = uri.find(':') |
|
276 | cred_pos = uri.find(':') | |
276 | if cred_pos == -1: |
|
277 | if cred_pos == -1: | |
277 | host, port = uri, None |
|
278 | host, port = uri, None | |
278 | else: |
|
279 | else: | |
279 | host, port = uri[:cred_pos], uri[cred_pos + 1:] |
|
280 | host, port = uri[:cred_pos], uri[cred_pos + 1:] | |
280 |
|
281 | |||
281 | return [_f for _f in [proto, host, port] if _f] |
|
282 | return [_f for _f in [proto, host, port] if _f] | |
282 |
|
283 | |||
283 |
|
284 | |||
284 | def credentials_filter(uri): |
|
285 | def credentials_filter(uri): | |
285 | """ |
|
286 | """ | |
286 | Returns a url with removed credentials |
|
287 | Returns a url with removed credentials | |
287 |
|
288 | |||
288 | :param uri: |
|
289 | :param uri: | |
289 | """ |
|
290 | """ | |
290 |
|
291 | |||
291 | uri = uri_filter(uri) |
|
292 | uri = uri_filter(uri) | |
292 | # check if we have port |
|
293 | # check if we have port | |
293 | if len(uri) > 2 and uri[2]: |
|
294 | if len(uri) > 2 and uri[2]: | |
294 | uri[2] = ':' + uri[2] |
|
295 | uri[2] = ':' + uri[2] | |
295 |
|
296 | |||
296 | return ''.join(uri) |
|
297 | return ''.join(uri) | |
297 |
|
298 | |||
298 |
|
299 | |||
299 | def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None): |
|
300 | def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None): | |
300 | parsed_url = urlobject.URLObject(prefix_url) |
|
301 | parsed_url = urlobject.URLObject(prefix_url) | |
301 | prefix = urllib.parse.unquote(parsed_url.path.rstrip('/')) |
|
302 | prefix = urllib.parse.unquote(parsed_url.path.rstrip('/')) | |
302 | try: |
|
303 | try: | |
303 | system_user = pwd.getpwuid(os.getuid()).pw_name |
|
304 | system_user = pwd.getpwuid(os.getuid()).pw_name | |
304 | except NameError: # TODO: support all systems - especially Windows |
|
305 | except NameError: # TODO: support all systems - especially Windows | |
305 | system_user = 'kallithea' # hardcoded default value ... |
|
306 | system_user = 'kallithea' # hardcoded default value ... | |
306 | args = { |
|
307 | args = { | |
307 | 'scheme': parsed_url.scheme, |
|
308 | 'scheme': parsed_url.scheme, | |
308 | 'user': urllib.parse.quote(username or ''), |
|
309 | 'user': urllib.parse.quote(username or ''), | |
309 | 'netloc': parsed_url.netloc + prefix, # like "hostname:port/prefix" (with optional ":port" and "/prefix") |
|
310 | 'netloc': parsed_url.netloc + prefix, # like "hostname:port/prefix" (with optional ":port" and "/prefix") | |
310 | 'prefix': prefix, # undocumented, empty or starting with / |
|
311 | 'prefix': prefix, # undocumented, empty or starting with / | |
311 | 'repo': repo_name, |
|
312 | 'repo': repo_name, | |
312 | 'repoid': str(repo_id), |
|
313 | 'repoid': str(repo_id), | |
313 | 'system_user': system_user, |
|
314 | 'system_user': system_user, | |
314 | 'hostname': parsed_url.hostname, |
|
315 | 'hostname': parsed_url.hostname, | |
315 | } |
|
316 | } | |
316 | url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl) |
|
317 | url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl) | |
317 |
|
318 | |||
318 | # remove leading @ sign if it's present. Case of empty user |
|
319 | # remove leading @ sign if it's present. Case of empty user | |
319 | url_obj = urlobject.URLObject(url) |
|
320 | url_obj = urlobject.URLObject(url) | |
320 | if not url_obj.username: |
|
321 | if not url_obj.username: | |
321 | url_obj = url_obj.with_username(None) |
|
322 | url_obj = url_obj.with_username(None) | |
322 |
|
323 | |||
323 | return str(url_obj) |
|
324 | return str(url_obj) | |
324 |
|
325 | |||
325 |
|
326 | |||
326 | def get_changeset_safe(repo, rev): |
|
327 | def get_changeset_safe(repo, rev): | |
327 | """ |
|
328 | """ | |
328 | Safe version of get_changeset if this changeset doesn't exists for a |
|
329 | Safe version of get_changeset if this changeset doesn't exists for a | |
329 | repo it returns a Dummy one instead |
|
330 | repo it returns a Dummy one instead | |
330 |
|
331 | |||
331 | :param repo: |
|
332 | :param repo: | |
332 | :param rev: |
|
333 | :param rev: | |
333 | """ |
|
334 | """ | |
334 | from kallithea.lib.vcs.backends.base import BaseRepository, EmptyChangeset |
|
335 | from kallithea.lib.vcs.backends.base import BaseRepository, EmptyChangeset | |
335 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
336 | from kallithea.lib.vcs.exceptions import RepositoryError | |
336 | if not isinstance(repo, BaseRepository): |
|
337 | if not isinstance(repo, BaseRepository): | |
337 | raise Exception('You must pass an Repository ' |
|
338 | raise Exception('You must pass an Repository ' | |
338 | 'object as first argument got %s' % type(repo)) |
|
339 | 'object as first argument got %s' % type(repo)) | |
339 |
|
340 | |||
340 | try: |
|
341 | try: | |
341 | cs = repo.get_changeset(rev) |
|
342 | cs = repo.get_changeset(rev) | |
342 | except (RepositoryError, LookupError): |
|
343 | except (RepositoryError, LookupError): | |
343 | cs = EmptyChangeset(requested_revision=rev) |
|
344 | cs = EmptyChangeset(requested_revision=rev) | |
344 | return cs |
|
345 | return cs | |
345 |
|
346 | |||
346 |
|
347 | |||
347 | def datetime_to_time(dt): |
|
348 | def datetime_to_time(dt): | |
348 | if dt: |
|
349 | if dt: | |
349 | return time.mktime(dt.timetuple()) |
|
350 | return time.mktime(dt.timetuple()) | |
350 |
|
351 | |||
351 |
|
352 | |||
352 | def time_to_datetime(tm): |
|
353 | def time_to_datetime(tm): | |
353 | if tm: |
|
354 | if tm: | |
354 | if isinstance(tm, str): |
|
355 | if isinstance(tm, str): | |
355 | try: |
|
356 | try: | |
356 | tm = float(tm) |
|
357 | tm = float(tm) | |
357 | except ValueError: |
|
358 | except ValueError: | |
358 | return |
|
359 | return | |
359 | return datetime.datetime.fromtimestamp(tm) |
|
360 | return datetime.datetime.fromtimestamp(tm) | |
360 |
|
361 | |||
361 |
|
362 | |||
362 | # Must match regexp in kallithea/public/js/base.js MentionsAutoComplete() |
|
363 | # Must match regexp in kallithea/public/js/base.js MentionsAutoComplete() | |
363 | # Check char before @ - it must not look like we are in an email addresses. |
|
364 | # Check char before @ - it must not look like we are in an email addresses. | |
364 | # Matching is greedy so we don't have to look beyond the end. |
|
365 | # Matching is greedy so we don't have to look beyond the end. | |
365 | MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])') |
|
366 | MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])') | |
366 |
|
367 | |||
367 |
|
368 | |||
368 | def extract_mentioned_usernames(text): |
|
369 | def extract_mentioned_usernames(text): | |
369 | r""" |
|
370 | r""" | |
370 | Returns list of (possible) usernames @mentioned in given text. |
|
371 | Returns list of (possible) usernames @mentioned in given text. | |
371 |
|
372 | |||
372 | >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,') |
|
373 | >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,') | |
373 | ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz'] |
|
374 | ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz'] | |
374 | """ |
|
375 | """ | |
375 | return MENTIONS_REGEX.findall(text) |
|
376 | return MENTIONS_REGEX.findall(text) | |
376 |
|
377 | |||
377 |
|
378 | |||
378 | def extract_mentioned_users(text): |
|
379 | def extract_mentioned_users(text): | |
379 | """ Returns set of actual database Users @mentioned in given text. """ |
|
380 | """ Returns set of actual database Users @mentioned in given text. """ | |
380 | from kallithea.model.db import User |
|
381 | from kallithea.model.db import User | |
381 | result = set() |
|
382 | result = set() | |
382 | for name in extract_mentioned_usernames(text): |
|
383 | for name in extract_mentioned_usernames(text): | |
383 | user = User.get_by_username(name, case_insensitive=True) |
|
384 | user = User.get_by_username(name, case_insensitive=True) | |
384 | if user is not None and not user.is_default_user: |
|
385 | if user is not None and not user.is_default_user: | |
385 | result.add(user) |
|
386 | result.add(user) | |
386 | return result |
|
387 | return result | |
387 |
|
388 | |||
388 |
|
389 | |||
389 | class AttributeDict(dict): |
|
390 | class AttributeDict(dict): | |
390 | def __getattr__(self, attr): |
|
391 | def __getattr__(self, attr): | |
391 | return self.get(attr, None) |
|
392 | return self.get(attr, None) | |
392 | __setattr__ = dict.__setitem__ |
|
393 | __setattr__ = dict.__setitem__ | |
393 | __delattr__ = dict.__delitem__ |
|
394 | __delattr__ = dict.__delitem__ | |
394 |
|
395 | |||
395 |
|
396 | |||
396 | def obfuscate_url_pw(engine): |
|
397 | def obfuscate_url_pw(engine): | |
397 | from sqlalchemy.engine import url as sa_url |
|
398 | from sqlalchemy.engine import url as sa_url | |
398 | from sqlalchemy.exc import ArgumentError |
|
399 | from sqlalchemy.exc import ArgumentError | |
399 | try: |
|
400 | try: | |
400 | _url = sa_url.make_url(engine or '') |
|
401 | _url = sa_url.make_url(engine or '') | |
401 | except ArgumentError: |
|
402 | except ArgumentError: | |
402 | return engine |
|
403 | return engine | |
403 | if _url.password: |
|
404 | if _url.password: | |
404 | _url.password = 'XXXXX' |
|
405 | _url.password = 'XXXXX' | |
405 | return str(_url) |
|
406 | return str(_url) | |
406 |
|
407 | |||
407 |
|
408 | |||
408 | class HookEnvironmentError(Exception): pass |
|
409 | class HookEnvironmentError(Exception): pass | |
409 |
|
410 | |||
410 |
|
411 | |||
411 | def get_hook_environment(): |
|
412 | def get_hook_environment(): | |
412 | """ |
|
413 | """ | |
413 | Get hook context by deserializing the global KALLITHEA_EXTRAS environment |
|
414 | Get hook context by deserializing the global KALLITHEA_EXTRAS environment | |
414 | variable. |
|
415 | variable. | |
415 |
|
416 | |||
416 | Called early in Git out-of-process hooks to get .ini config path so the |
|
417 | Called early in Git out-of-process hooks to get .ini config path so the | |
417 | basic environment can be configured properly. Also used in all hooks to get |
|
418 | basic environment can be configured properly. Also used in all hooks to get | |
418 | information about the action that triggered it. |
|
419 | information about the action that triggered it. | |
419 | """ |
|
420 | """ | |
420 |
|
421 | |||
421 | try: |
|
422 | try: | |
422 | kallithea_extras = os.environ['KALLITHEA_EXTRAS'] |
|
423 | kallithea_extras = os.environ['KALLITHEA_EXTRAS'] | |
423 | except KeyError: |
|
424 | except KeyError: | |
424 | raise HookEnvironmentError("Environment variable KALLITHEA_EXTRAS not found") |
|
425 | raise HookEnvironmentError("Environment variable KALLITHEA_EXTRAS not found") | |
425 |
|
426 | |||
426 | extras = json.loads(kallithea_extras) |
|
427 | extras = json.loads(kallithea_extras) | |
427 | for k in ['username', 'repository', 'scm', 'action', 'ip', 'config']: |
|
428 | for k in ['username', 'repository', 'scm', 'action', 'ip', 'config']: | |
428 | try: |
|
429 | try: | |
429 | extras[k] |
|
430 | extras[k] | |
430 | except KeyError: |
|
431 | except KeyError: | |
431 | raise HookEnvironmentError('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras)) |
|
432 | raise HookEnvironmentError('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras)) | |
432 |
|
433 | |||
433 | return AttributeDict(extras) |
|
434 | return AttributeDict(extras) | |
434 |
|
435 | |||
435 |
|
436 | |||
436 | def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None): |
|
437 | def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None): | |
437 | """Prepare global context for running hooks by serializing data in the |
|
438 | """Prepare global context for running hooks by serializing data in the | |
438 | global KALLITHEA_EXTRAS environment variable. |
|
439 | global KALLITHEA_EXTRAS environment variable. | |
439 |
|
440 | |||
440 | Most importantly, this allow Git hooks to do proper logging and updating of |
|
441 | Most importantly, this allow Git hooks to do proper logging and updating of | |
441 | caches after pushes. |
|
442 | caches after pushes. | |
442 |
|
443 | |||
443 | Must always be called before anything with hooks are invoked. |
|
444 | Must always be called before anything with hooks are invoked. | |
444 | """ |
|
445 | """ | |
445 | from kallithea import CONFIG |
|
|||
446 | extras = { |
|
446 | extras = { | |
447 | 'ip': ip_addr, # used in log_push/pull_action action_logger |
|
447 | 'ip': ip_addr, # used in log_push/pull_action action_logger | |
448 | 'username': username, |
|
448 | 'username': username, | |
449 | 'action': action or 'push_local', # used in log_push_action_raw_ids action_logger |
|
449 | 'action': action or 'push_local', # used in log_push_action_raw_ids action_logger | |
450 | 'repository': repo_name, |
|
450 | 'repository': repo_name, | |
451 | 'scm': repo_alias, # used to pick hack in log_push_action_raw_ids |
|
451 | 'scm': repo_alias, # used to pick hack in log_push_action_raw_ids | |
452 | 'config': CONFIG['__file__'], # used by git hook to read config |
|
452 | 'config': kallithea.CONFIG['__file__'], # used by git hook to read config | |
453 | } |
|
453 | } | |
454 | os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras) |
|
454 | os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras) | |
455 |
|
455 | |||
456 |
|
456 | |||
457 | def get_current_authuser(): |
|
457 | def get_current_authuser(): | |
458 | """ |
|
458 | """ | |
459 | Gets kallithea user from threadlocal tmpl_context variable if it's |
|
459 | Gets kallithea user from threadlocal tmpl_context variable if it's | |
460 | defined, else returns None. |
|
460 | defined, else returns None. | |
461 | """ |
|
461 | """ | |
462 | from tg import tmpl_context |
|
462 | from tg import tmpl_context | |
463 | try: |
|
463 | try: | |
464 | return getattr(tmpl_context, 'authuser', None) |
|
464 | return getattr(tmpl_context, 'authuser', None) | |
465 | except TypeError: # No object (name: context) has been registered for this thread |
|
465 | except TypeError: # No object (name: context) has been registered for this thread | |
466 | return None |
|
466 | return None | |
467 |
|
467 | |||
468 |
|
468 | |||
469 | def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub): |
|
469 | def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub): | |
470 | return _cleanstringsub('_', s).rstrip('_') |
|
470 | return _cleanstringsub('_', s).rstrip('_') | |
471 |
|
471 | |||
472 |
|
472 | |||
473 | def recursive_replace(str_, replace=' '): |
|
473 | def recursive_replace(str_, replace=' '): | |
474 | """ |
|
474 | """ | |
475 | Recursive replace of given sign to just one instance |
|
475 | Recursive replace of given sign to just one instance | |
476 |
|
476 | |||
477 | :param str_: given string |
|
477 | :param str_: given string | |
478 | :param replace: char to find and replace multiple instances |
|
478 | :param replace: char to find and replace multiple instances | |
479 |
|
479 | |||
480 | Examples:: |
|
480 | Examples:: | |
481 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
481 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') | |
482 | 'Mighty-Mighty-Bo-sstones' |
|
482 | 'Mighty-Mighty-Bo-sstones' | |
483 | """ |
|
483 | """ | |
484 |
|
484 | |||
485 | if str_.find(replace * 2) == -1: |
|
485 | if str_.find(replace * 2) == -1: | |
486 | return str_ |
|
486 | return str_ | |
487 | else: |
|
487 | else: | |
488 | str_ = str_.replace(replace * 2, replace) |
|
488 | str_ = str_.replace(replace * 2, replace) | |
489 | return recursive_replace(str_, replace) |
|
489 | return recursive_replace(str_, replace) | |
490 |
|
490 | |||
491 |
|
491 | |||
492 | def repo_name_slug(value): |
|
492 | def repo_name_slug(value): | |
493 | """ |
|
493 | """ | |
494 | Return slug of name of repository |
|
494 | Return slug of name of repository | |
495 | This function is called on each creation/modification |
|
495 | This function is called on each creation/modification | |
496 | of repository to prevent bad names in repo |
|
496 | of repository to prevent bad names in repo | |
497 | """ |
|
497 | """ | |
498 |
|
498 | |||
499 | slug = remove_formatting(value) |
|
499 | slug = remove_formatting(value) | |
500 | slug = strip_tags(slug) |
|
500 | slug = strip_tags(slug) | |
501 |
|
501 | |||
502 | for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: |
|
502 | for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: | |
503 | slug = slug.replace(c, '-') |
|
503 | slug = slug.replace(c, '-') | |
504 | slug = recursive_replace(slug, '-') |
|
504 | slug = recursive_replace(slug, '-') | |
505 | slug = collapse(slug, '-') |
|
505 | slug = collapse(slug, '-') | |
506 | return slug |
|
506 | return slug | |
507 |
|
507 | |||
508 |
|
508 | |||
509 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): |
|
509 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): | |
510 | while True: |
|
510 | while True: | |
511 | ok = input(prompt) |
|
511 | ok = input(prompt) | |
512 | if ok in ('y', 'ye', 'yes'): |
|
512 | if ok in ('y', 'ye', 'yes'): | |
513 | return True |
|
513 | return True | |
514 | if ok in ('n', 'no', 'nop', 'nope'): |
|
514 | if ok in ('n', 'no', 'nop', 'nope'): | |
515 | return False |
|
515 | return False | |
516 | retries = retries - 1 |
|
516 | retries = retries - 1 | |
517 | if retries < 0: |
|
517 | if retries < 0: | |
518 | raise IOError |
|
518 | raise IOError | |
519 | print(complaint) |
|
519 | print(complaint) |
@@ -1,564 +1,564 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | these are form validation classes |
|
15 | these are form validation classes | |
16 | http://formencode.org/module-formencode.validators.html |
|
16 | http://formencode.org/module-formencode.validators.html | |
17 | for list of all available validators |
|
17 | for list of all available validators | |
18 |
|
18 | |||
19 | we can create our own validators |
|
19 | we can create our own validators | |
20 |
|
20 | |||
21 | The table below outlines the options which can be used in a schema in addition to the validators themselves |
|
21 | The table below outlines the options which can be used in a schema in addition to the validators themselves | |
22 | pre_validators [] These validators will be applied before the schema |
|
22 | pre_validators [] These validators will be applied before the schema | |
23 | chained_validators [] These validators will be applied after the schema |
|
23 | chained_validators [] These validators will be applied after the schema | |
24 | allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present |
|
24 | allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present | |
25 | filter_extra_fields False If True, then keys that aren't associated with a validator are removed |
|
25 | filter_extra_fields False If True, then keys that aren't associated with a validator are removed | |
26 | if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value. |
|
26 | if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value. | |
27 | ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already |
|
27 | ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already | |
28 |
|
28 | |||
29 |
|
29 | |||
30 | <name> = formencode.validators.<name of validator> |
|
30 | <name> = formencode.validators.<name of validator> | |
31 | <name> must equal form name |
|
31 | <name> must equal form name | |
32 | list=[1,2,3,4,5] |
|
32 | list=[1,2,3,4,5] | |
33 | for SELECT use formencode.All(OneOf(list), Int()) |
|
33 | for SELECT use formencode.All(OneOf(list), Int()) | |
34 |
|
34 | |||
35 | """ |
|
35 | """ | |
36 | import logging |
|
36 | import logging | |
37 |
|
37 | |||
38 | import formencode |
|
38 | import formencode | |
39 | from formencode import All |
|
39 | from formencode import All | |
40 | from tg.i18n import ugettext as _ |
|
40 | from tg.i18n import ugettext as _ | |
41 |
|
41 | |||
42 | from kallithea import BACKENDS |
|
42 | import kallithea | |
43 | from kallithea.model import validators as v |
|
43 | from kallithea.model import validators as v | |
44 |
|
44 | |||
45 |
|
45 | |||
46 | log = logging.getLogger(__name__) |
|
46 | log = logging.getLogger(__name__) | |
47 |
|
47 | |||
48 |
|
48 | |||
49 | def LoginForm(): |
|
49 | def LoginForm(): | |
50 | class _LoginForm(formencode.Schema): |
|
50 | class _LoginForm(formencode.Schema): | |
51 | allow_extra_fields = True |
|
51 | allow_extra_fields = True | |
52 | filter_extra_fields = True |
|
52 | filter_extra_fields = True | |
53 | username = v.UnicodeString( |
|
53 | username = v.UnicodeString( | |
54 | strip=True, |
|
54 | strip=True, | |
55 | min=1, |
|
55 | min=1, | |
56 | not_empty=True, |
|
56 | not_empty=True, | |
57 | messages={ |
|
57 | messages={ | |
58 | 'empty': _('Please enter a login'), |
|
58 | 'empty': _('Please enter a login'), | |
59 | 'tooShort': _('Enter a value %(min)i characters long or more')} |
|
59 | 'tooShort': _('Enter a value %(min)i characters long or more')} | |
60 | ) |
|
60 | ) | |
61 |
|
61 | |||
62 | password = v.UnicodeString( |
|
62 | password = v.UnicodeString( | |
63 | strip=False, |
|
63 | strip=False, | |
64 | min=3, |
|
64 | min=3, | |
65 | not_empty=True, |
|
65 | not_empty=True, | |
66 | messages={ |
|
66 | messages={ | |
67 | 'empty': _('Please enter a password'), |
|
67 | 'empty': _('Please enter a password'), | |
68 | 'tooShort': _('Enter %(min)i characters or more')} |
|
68 | 'tooShort': _('Enter %(min)i characters or more')} | |
69 | ) |
|
69 | ) | |
70 |
|
70 | |||
71 | remember = v.StringBoolean(if_missing=False) |
|
71 | remember = v.StringBoolean(if_missing=False) | |
72 |
|
72 | |||
73 | chained_validators = [v.ValidAuth()] |
|
73 | chained_validators = [v.ValidAuth()] | |
74 | return _LoginForm |
|
74 | return _LoginForm | |
75 |
|
75 | |||
76 |
|
76 | |||
77 | def PasswordChangeForm(username): |
|
77 | def PasswordChangeForm(username): | |
78 | class _PasswordChangeForm(formencode.Schema): |
|
78 | class _PasswordChangeForm(formencode.Schema): | |
79 | allow_extra_fields = True |
|
79 | allow_extra_fields = True | |
80 | filter_extra_fields = True |
|
80 | filter_extra_fields = True | |
81 |
|
81 | |||
82 | current_password = v.ValidOldPassword(username)(not_empty=True) |
|
82 | current_password = v.ValidOldPassword(username)(not_empty=True) | |
83 | new_password = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6)) |
|
83 | new_password = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6)) | |
84 | new_password_confirmation = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6)) |
|
84 | new_password_confirmation = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6)) | |
85 |
|
85 | |||
86 | chained_validators = [v.ValidPasswordsMatch('new_password', |
|
86 | chained_validators = [v.ValidPasswordsMatch('new_password', | |
87 | 'new_password_confirmation')] |
|
87 | 'new_password_confirmation')] | |
88 | return _PasswordChangeForm |
|
88 | return _PasswordChangeForm | |
89 |
|
89 | |||
90 |
|
90 | |||
91 | def UserForm(edit=False, old_data=None): |
|
91 | def UserForm(edit=False, old_data=None): | |
92 | old_data = old_data or {} |
|
92 | old_data = old_data or {} | |
93 |
|
93 | |||
94 | class _UserForm(formencode.Schema): |
|
94 | class _UserForm(formencode.Schema): | |
95 | allow_extra_fields = True |
|
95 | allow_extra_fields = True | |
96 | filter_extra_fields = True |
|
96 | filter_extra_fields = True | |
97 | username = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
97 | username = All(v.UnicodeString(strip=True, min=1, not_empty=True), | |
98 | v.ValidUsername(edit, old_data)) |
|
98 | v.ValidUsername(edit, old_data)) | |
99 | if edit: |
|
99 | if edit: | |
100 | new_password = All( |
|
100 | new_password = All( | |
101 | v.ValidPassword(), |
|
101 | v.ValidPassword(), | |
102 | v.UnicodeString(strip=False, min=6, not_empty=False) |
|
102 | v.UnicodeString(strip=False, min=6, not_empty=False) | |
103 | ) |
|
103 | ) | |
104 | password_confirmation = All( |
|
104 | password_confirmation = All( | |
105 | v.ValidPassword(), |
|
105 | v.ValidPassword(), | |
106 | v.UnicodeString(strip=False, min=6, not_empty=False), |
|
106 | v.UnicodeString(strip=False, min=6, not_empty=False), | |
107 | ) |
|
107 | ) | |
108 | admin = v.StringBoolean(if_missing=False) |
|
108 | admin = v.StringBoolean(if_missing=False) | |
109 | chained_validators = [v.ValidPasswordsMatch('new_password', |
|
109 | chained_validators = [v.ValidPasswordsMatch('new_password', | |
110 | 'password_confirmation')] |
|
110 | 'password_confirmation')] | |
111 | else: |
|
111 | else: | |
112 | password = All( |
|
112 | password = All( | |
113 | v.ValidPassword(), |
|
113 | v.ValidPassword(), | |
114 | v.UnicodeString(strip=False, min=6, not_empty=True) |
|
114 | v.UnicodeString(strip=False, min=6, not_empty=True) | |
115 | ) |
|
115 | ) | |
116 | password_confirmation = All( |
|
116 | password_confirmation = All( | |
117 | v.ValidPassword(), |
|
117 | v.ValidPassword(), | |
118 | v.UnicodeString(strip=False, min=6, not_empty=False) |
|
118 | v.UnicodeString(strip=False, min=6, not_empty=False) | |
119 | ) |
|
119 | ) | |
120 | chained_validators = [v.ValidPasswordsMatch('password', |
|
120 | chained_validators = [v.ValidPasswordsMatch('password', | |
121 | 'password_confirmation')] |
|
121 | 'password_confirmation')] | |
122 |
|
122 | |||
123 | active = v.StringBoolean(if_missing=False) |
|
123 | active = v.StringBoolean(if_missing=False) | |
124 | firstname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
124 | firstname = v.UnicodeString(strip=True, min=1, not_empty=False) | |
125 | lastname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
125 | lastname = v.UnicodeString(strip=True, min=1, not_empty=False) | |
126 | email = All(v.Email(not_empty=True), v.UniqSystemEmail(old_data)) |
|
126 | email = All(v.Email(not_empty=True), v.UniqSystemEmail(old_data)) | |
127 | extern_name = v.UnicodeString(strip=True, if_missing=None) |
|
127 | extern_name = v.UnicodeString(strip=True, if_missing=None) | |
128 | extern_type = v.UnicodeString(strip=True, if_missing=None) |
|
128 | extern_type = v.UnicodeString(strip=True, if_missing=None) | |
129 | return _UserForm |
|
129 | return _UserForm | |
130 |
|
130 | |||
131 |
|
131 | |||
132 | def UserGroupForm(edit=False, old_data=None, available_members=None): |
|
132 | def UserGroupForm(edit=False, old_data=None, available_members=None): | |
133 | old_data = old_data or {} |
|
133 | old_data = old_data or {} | |
134 | available_members = available_members or [] |
|
134 | available_members = available_members or [] | |
135 |
|
135 | |||
136 | class _UserGroupForm(formencode.Schema): |
|
136 | class _UserGroupForm(formencode.Schema): | |
137 | allow_extra_fields = True |
|
137 | allow_extra_fields = True | |
138 | filter_extra_fields = True |
|
138 | filter_extra_fields = True | |
139 |
|
139 | |||
140 | users_group_name = All( |
|
140 | users_group_name = All( | |
141 | v.UnicodeString(strip=True, min=1, not_empty=True), |
|
141 | v.UnicodeString(strip=True, min=1, not_empty=True), | |
142 | v.ValidUserGroup(edit, old_data) |
|
142 | v.ValidUserGroup(edit, old_data) | |
143 | ) |
|
143 | ) | |
144 | user_group_description = v.UnicodeString(strip=True, min=1, |
|
144 | user_group_description = v.UnicodeString(strip=True, min=1, | |
145 | not_empty=False) |
|
145 | not_empty=False) | |
146 |
|
146 | |||
147 | users_group_active = v.StringBoolean(if_missing=False) |
|
147 | users_group_active = v.StringBoolean(if_missing=False) | |
148 |
|
148 | |||
149 | if edit: |
|
149 | if edit: | |
150 | users_group_members = v.OneOf( |
|
150 | users_group_members = v.OneOf( | |
151 | available_members, hideList=False, testValueList=True, |
|
151 | available_members, hideList=False, testValueList=True, | |
152 | if_missing=None, not_empty=False |
|
152 | if_missing=None, not_empty=False | |
153 | ) |
|
153 | ) | |
154 |
|
154 | |||
155 | return _UserGroupForm |
|
155 | return _UserGroupForm | |
156 |
|
156 | |||
157 |
|
157 | |||
158 | def RepoGroupForm(edit=False, old_data=None, repo_groups=None, |
|
158 | def RepoGroupForm(edit=False, old_data=None, repo_groups=None, | |
159 | can_create_in_root=False): |
|
159 | can_create_in_root=False): | |
160 | old_data = old_data or {} |
|
160 | old_data = old_data or {} | |
161 | repo_groups = repo_groups or [] |
|
161 | repo_groups = repo_groups or [] | |
162 | repo_group_ids = [rg[0] for rg in repo_groups] |
|
162 | repo_group_ids = [rg[0] for rg in repo_groups] | |
163 |
|
163 | |||
164 | class _RepoGroupForm(formencode.Schema): |
|
164 | class _RepoGroupForm(formencode.Schema): | |
165 | allow_extra_fields = True |
|
165 | allow_extra_fields = True | |
166 | filter_extra_fields = False |
|
166 | filter_extra_fields = False | |
167 |
|
167 | |||
168 | group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
168 | group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), | |
169 | v.SlugifyName(), |
|
169 | v.SlugifyName(), | |
170 | v.ValidRegex(msg=_('Name must not contain only digits'))(r'(?!^\d+$)^.+$')) |
|
170 | v.ValidRegex(msg=_('Name must not contain only digits'))(r'(?!^\d+$)^.+$')) | |
171 | group_description = v.UnicodeString(strip=True, min=1, |
|
171 | group_description = v.UnicodeString(strip=True, min=1, | |
172 | not_empty=False) |
|
172 | not_empty=False) | |
173 | group_copy_permissions = v.StringBoolean(if_missing=False) |
|
173 | group_copy_permissions = v.StringBoolean(if_missing=False) | |
174 |
|
174 | |||
175 | if edit: |
|
175 | if edit: | |
176 | # FIXME: do a special check that we cannot move a group to one of |
|
176 | # FIXME: do a special check that we cannot move a group to one of | |
177 | # its children |
|
177 | # its children | |
178 | pass |
|
178 | pass | |
179 |
|
179 | |||
180 | parent_group_id = All(v.CanCreateGroup(can_create_in_root), |
|
180 | parent_group_id = All(v.CanCreateGroup(can_create_in_root), | |
181 | v.OneOf(repo_group_ids, hideList=False, |
|
181 | v.OneOf(repo_group_ids, hideList=False, | |
182 | testValueList=True, |
|
182 | testValueList=True, | |
183 | if_missing=None, not_empty=True), |
|
183 | if_missing=None, not_empty=True), | |
184 | v.Int(min=-1, not_empty=True)) |
|
184 | v.Int(min=-1, not_empty=True)) | |
185 | chained_validators = [v.ValidRepoGroup(edit, old_data)] |
|
185 | chained_validators = [v.ValidRepoGroup(edit, old_data)] | |
186 |
|
186 | |||
187 | return _RepoGroupForm |
|
187 | return _RepoGroupForm | |
188 |
|
188 | |||
189 |
|
189 | |||
190 | def RegisterForm(edit=False, old_data=None): |
|
190 | def RegisterForm(edit=False, old_data=None): | |
191 | class _RegisterForm(formencode.Schema): |
|
191 | class _RegisterForm(formencode.Schema): | |
192 | allow_extra_fields = True |
|
192 | allow_extra_fields = True | |
193 | filter_extra_fields = True |
|
193 | filter_extra_fields = True | |
194 | username = All( |
|
194 | username = All( | |
195 | v.ValidUsername(edit, old_data), |
|
195 | v.ValidUsername(edit, old_data), | |
196 | v.UnicodeString(strip=True, min=1, not_empty=True) |
|
196 | v.UnicodeString(strip=True, min=1, not_empty=True) | |
197 | ) |
|
197 | ) | |
198 | password = All( |
|
198 | password = All( | |
199 | v.ValidPassword(), |
|
199 | v.ValidPassword(), | |
200 | v.UnicodeString(strip=False, min=6, not_empty=True) |
|
200 | v.UnicodeString(strip=False, min=6, not_empty=True) | |
201 | ) |
|
201 | ) | |
202 | password_confirmation = All( |
|
202 | password_confirmation = All( | |
203 | v.ValidPassword(), |
|
203 | v.ValidPassword(), | |
204 | v.UnicodeString(strip=False, min=6, not_empty=True) |
|
204 | v.UnicodeString(strip=False, min=6, not_empty=True) | |
205 | ) |
|
205 | ) | |
206 | active = v.StringBoolean(if_missing=False) |
|
206 | active = v.StringBoolean(if_missing=False) | |
207 | firstname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
207 | firstname = v.UnicodeString(strip=True, min=1, not_empty=False) | |
208 | lastname = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
208 | lastname = v.UnicodeString(strip=True, min=1, not_empty=False) | |
209 | email = All(v.Email(not_empty=True), v.UniqSystemEmail(old_data)) |
|
209 | email = All(v.Email(not_empty=True), v.UniqSystemEmail(old_data)) | |
210 |
|
210 | |||
211 | chained_validators = [v.ValidPasswordsMatch('password', |
|
211 | chained_validators = [v.ValidPasswordsMatch('password', | |
212 | 'password_confirmation')] |
|
212 | 'password_confirmation')] | |
213 |
|
213 | |||
214 | return _RegisterForm |
|
214 | return _RegisterForm | |
215 |
|
215 | |||
216 |
|
216 | |||
217 | def PasswordResetRequestForm(): |
|
217 | def PasswordResetRequestForm(): | |
218 | class _PasswordResetRequestForm(formencode.Schema): |
|
218 | class _PasswordResetRequestForm(formencode.Schema): | |
219 | allow_extra_fields = True |
|
219 | allow_extra_fields = True | |
220 | filter_extra_fields = True |
|
220 | filter_extra_fields = True | |
221 | email = v.Email(not_empty=True) |
|
221 | email = v.Email(not_empty=True) | |
222 | return _PasswordResetRequestForm |
|
222 | return _PasswordResetRequestForm | |
223 |
|
223 | |||
224 |
|
224 | |||
225 | def PasswordResetConfirmationForm(): |
|
225 | def PasswordResetConfirmationForm(): | |
226 | class _PasswordResetConfirmationForm(formencode.Schema): |
|
226 | class _PasswordResetConfirmationForm(formencode.Schema): | |
227 | allow_extra_fields = True |
|
227 | allow_extra_fields = True | |
228 | filter_extra_fields = True |
|
228 | filter_extra_fields = True | |
229 |
|
229 | |||
230 | email = v.UnicodeString(strip=True, not_empty=True) |
|
230 | email = v.UnicodeString(strip=True, not_empty=True) | |
231 | timestamp = v.Number(strip=True, not_empty=True) |
|
231 | timestamp = v.Number(strip=True, not_empty=True) | |
232 | token = v.UnicodeString(strip=True, not_empty=True) |
|
232 | token = v.UnicodeString(strip=True, not_empty=True) | |
233 | password = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6)) |
|
233 | password = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6)) | |
234 | password_confirm = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6)) |
|
234 | password_confirm = All(v.ValidPassword(), v.UnicodeString(strip=False, min=6)) | |
235 |
|
235 | |||
236 | chained_validators = [v.ValidPasswordsMatch('password', |
|
236 | chained_validators = [v.ValidPasswordsMatch('password', | |
237 | 'password_confirm')] |
|
237 | 'password_confirm')] | |
238 | return _PasswordResetConfirmationForm |
|
238 | return _PasswordResetConfirmationForm | |
239 |
|
239 | |||
240 |
|
240 | |||
241 | def RepoForm(edit=False, old_data=None, supported_backends=BACKENDS, |
|
241 | def RepoForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS, | |
242 | repo_groups=None, landing_revs=None): |
|
242 | repo_groups=None, landing_revs=None): | |
243 | old_data = old_data or {} |
|
243 | old_data = old_data or {} | |
244 | repo_groups = repo_groups or [] |
|
244 | repo_groups = repo_groups or [] | |
245 | landing_revs = landing_revs or [] |
|
245 | landing_revs = landing_revs or [] | |
246 | repo_group_ids = [rg[0] for rg in repo_groups] |
|
246 | repo_group_ids = [rg[0] for rg in repo_groups] | |
247 |
|
247 | |||
248 | class _RepoForm(formencode.Schema): |
|
248 | class _RepoForm(formencode.Schema): | |
249 | allow_extra_fields = True |
|
249 | allow_extra_fields = True | |
250 | filter_extra_fields = False |
|
250 | filter_extra_fields = False | |
251 | repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
251 | repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), | |
252 | v.SlugifyName()) |
|
252 | v.SlugifyName()) | |
253 | repo_group = All(v.CanWriteGroup(old_data), |
|
253 | repo_group = All(v.CanWriteGroup(old_data), | |
254 | v.OneOf(repo_group_ids, hideList=True), |
|
254 | v.OneOf(repo_group_ids, hideList=True), | |
255 | v.Int(min=-1, not_empty=True)) |
|
255 | v.Int(min=-1, not_empty=True)) | |
256 | repo_type = v.OneOf(supported_backends, required=False, |
|
256 | repo_type = v.OneOf(supported_backends, required=False, | |
257 | if_missing=old_data.get('repo_type')) |
|
257 | if_missing=old_data.get('repo_type')) | |
258 | repo_description = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
258 | repo_description = v.UnicodeString(strip=True, min=1, not_empty=False) | |
259 | repo_private = v.StringBoolean(if_missing=False) |
|
259 | repo_private = v.StringBoolean(if_missing=False) | |
260 | repo_landing_rev = v.OneOf(landing_revs, hideList=True) |
|
260 | repo_landing_rev = v.OneOf(landing_revs, hideList=True) | |
261 | repo_copy_permissions = v.StringBoolean(if_missing=False) |
|
261 | repo_copy_permissions = v.StringBoolean(if_missing=False) | |
262 | clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False)) |
|
262 | clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False)) | |
263 |
|
263 | |||
264 | repo_enable_statistics = v.StringBoolean(if_missing=False) |
|
264 | repo_enable_statistics = v.StringBoolean(if_missing=False) | |
265 | repo_enable_downloads = v.StringBoolean(if_missing=False) |
|
265 | repo_enable_downloads = v.StringBoolean(if_missing=False) | |
266 |
|
266 | |||
267 | if edit: |
|
267 | if edit: | |
268 | owner = All(v.UnicodeString(not_empty=True), v.ValidRepoUser()) |
|
268 | owner = All(v.UnicodeString(not_empty=True), v.ValidRepoUser()) | |
269 | # Not a real field - just for reference for validation: |
|
269 | # Not a real field - just for reference for validation: | |
270 | # clone_uri_hidden = v.UnicodeString(if_missing='') |
|
270 | # clone_uri_hidden = v.UnicodeString(if_missing='') | |
271 |
|
271 | |||
272 | chained_validators = [v.ValidCloneUri(), |
|
272 | chained_validators = [v.ValidCloneUri(), | |
273 | v.ValidRepoName(edit, old_data)] |
|
273 | v.ValidRepoName(edit, old_data)] | |
274 | return _RepoForm |
|
274 | return _RepoForm | |
275 |
|
275 | |||
276 |
|
276 | |||
277 | def RepoPermsForm(): |
|
277 | def RepoPermsForm(): | |
278 | class _RepoPermsForm(formencode.Schema): |
|
278 | class _RepoPermsForm(formencode.Schema): | |
279 | allow_extra_fields = True |
|
279 | allow_extra_fields = True | |
280 | filter_extra_fields = False |
|
280 | filter_extra_fields = False | |
281 | chained_validators = [v.ValidPerms(type_='repo')] |
|
281 | chained_validators = [v.ValidPerms(type_='repo')] | |
282 | return _RepoPermsForm |
|
282 | return _RepoPermsForm | |
283 |
|
283 | |||
284 |
|
284 | |||
285 | def RepoGroupPermsForm(valid_recursive_choices): |
|
285 | def RepoGroupPermsForm(valid_recursive_choices): | |
286 | class _RepoGroupPermsForm(formencode.Schema): |
|
286 | class _RepoGroupPermsForm(formencode.Schema): | |
287 | allow_extra_fields = True |
|
287 | allow_extra_fields = True | |
288 | filter_extra_fields = False |
|
288 | filter_extra_fields = False | |
289 | recursive = v.OneOf(valid_recursive_choices) |
|
289 | recursive = v.OneOf(valid_recursive_choices) | |
290 | chained_validators = [v.ValidPerms(type_='repo_group')] |
|
290 | chained_validators = [v.ValidPerms(type_='repo_group')] | |
291 | return _RepoGroupPermsForm |
|
291 | return _RepoGroupPermsForm | |
292 |
|
292 | |||
293 |
|
293 | |||
294 | def UserGroupPermsForm(): |
|
294 | def UserGroupPermsForm(): | |
295 | class _UserPermsForm(formencode.Schema): |
|
295 | class _UserPermsForm(formencode.Schema): | |
296 | allow_extra_fields = True |
|
296 | allow_extra_fields = True | |
297 | filter_extra_fields = False |
|
297 | filter_extra_fields = False | |
298 | chained_validators = [v.ValidPerms(type_='user_group')] |
|
298 | chained_validators = [v.ValidPerms(type_='user_group')] | |
299 | return _UserPermsForm |
|
299 | return _UserPermsForm | |
300 |
|
300 | |||
301 |
|
301 | |||
302 | def RepoFieldForm(): |
|
302 | def RepoFieldForm(): | |
303 | class _RepoFieldForm(formencode.Schema): |
|
303 | class _RepoFieldForm(formencode.Schema): | |
304 | filter_extra_fields = True |
|
304 | filter_extra_fields = True | |
305 | allow_extra_fields = True |
|
305 | allow_extra_fields = True | |
306 |
|
306 | |||
307 | new_field_key = All(v.FieldKey(), |
|
307 | new_field_key = All(v.FieldKey(), | |
308 | v.UnicodeString(strip=True, min=3, not_empty=True)) |
|
308 | v.UnicodeString(strip=True, min=3, not_empty=True)) | |
309 | new_field_value = v.UnicodeString(not_empty=False, if_missing='') |
|
309 | new_field_value = v.UnicodeString(not_empty=False, if_missing='') | |
310 | new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'], |
|
310 | new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'], | |
311 | if_missing='str') |
|
311 | if_missing='str') | |
312 | new_field_label = v.UnicodeString(not_empty=False) |
|
312 | new_field_label = v.UnicodeString(not_empty=False) | |
313 | new_field_desc = v.UnicodeString(not_empty=False) |
|
313 | new_field_desc = v.UnicodeString(not_empty=False) | |
314 |
|
314 | |||
315 | return _RepoFieldForm |
|
315 | return _RepoFieldForm | |
316 |
|
316 | |||
317 |
|
317 | |||
318 | def RepoForkForm(edit=False, old_data=None, supported_backends=BACKENDS, |
|
318 | def RepoForkForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS, | |
319 | repo_groups=None, landing_revs=None): |
|
319 | repo_groups=None, landing_revs=None): | |
320 | old_data = old_data or {} |
|
320 | old_data = old_data or {} | |
321 | repo_groups = repo_groups or [] |
|
321 | repo_groups = repo_groups or [] | |
322 | landing_revs = landing_revs or [] |
|
322 | landing_revs = landing_revs or [] | |
323 | repo_group_ids = [rg[0] for rg in repo_groups] |
|
323 | repo_group_ids = [rg[0] for rg in repo_groups] | |
324 |
|
324 | |||
325 | class _RepoForkForm(formencode.Schema): |
|
325 | class _RepoForkForm(formencode.Schema): | |
326 | allow_extra_fields = True |
|
326 | allow_extra_fields = True | |
327 | filter_extra_fields = False |
|
327 | filter_extra_fields = False | |
328 | repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), |
|
328 | repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True), | |
329 | v.SlugifyName()) |
|
329 | v.SlugifyName()) | |
330 | repo_group = All(v.CanWriteGroup(), |
|
330 | repo_group = All(v.CanWriteGroup(), | |
331 | v.OneOf(repo_group_ids, hideList=True), |
|
331 | v.OneOf(repo_group_ids, hideList=True), | |
332 | v.Int(min=-1, not_empty=True)) |
|
332 | v.Int(min=-1, not_empty=True)) | |
333 | repo_type = All(v.ValidForkType(old_data), v.OneOf(supported_backends)) |
|
333 | repo_type = All(v.ValidForkType(old_data), v.OneOf(supported_backends)) | |
334 | description = v.UnicodeString(strip=True, min=1, not_empty=True) |
|
334 | description = v.UnicodeString(strip=True, min=1, not_empty=True) | |
335 | private = v.StringBoolean(if_missing=False) |
|
335 | private = v.StringBoolean(if_missing=False) | |
336 | copy_permissions = v.StringBoolean(if_missing=False) |
|
336 | copy_permissions = v.StringBoolean(if_missing=False) | |
337 | update_after_clone = v.StringBoolean(if_missing=False) |
|
337 | update_after_clone = v.StringBoolean(if_missing=False) | |
338 | fork_parent_id = v.UnicodeString() |
|
338 | fork_parent_id = v.UnicodeString() | |
339 | chained_validators = [v.ValidForkName(edit, old_data)] |
|
339 | chained_validators = [v.ValidForkName(edit, old_data)] | |
340 | landing_rev = v.OneOf(landing_revs, hideList=True) |
|
340 | landing_rev = v.OneOf(landing_revs, hideList=True) | |
341 |
|
341 | |||
342 | return _RepoForkForm |
|
342 | return _RepoForkForm | |
343 |
|
343 | |||
344 |
|
344 | |||
345 | def ApplicationSettingsForm(): |
|
345 | def ApplicationSettingsForm(): | |
346 | class _ApplicationSettingsForm(formencode.Schema): |
|
346 | class _ApplicationSettingsForm(formencode.Schema): | |
347 | allow_extra_fields = True |
|
347 | allow_extra_fields = True | |
348 | filter_extra_fields = False |
|
348 | filter_extra_fields = False | |
349 | title = v.UnicodeString(strip=True, not_empty=False) |
|
349 | title = v.UnicodeString(strip=True, not_empty=False) | |
350 | realm = v.UnicodeString(strip=True, min=1, not_empty=True) |
|
350 | realm = v.UnicodeString(strip=True, min=1, not_empty=True) | |
351 | ga_code = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
351 | ga_code = v.UnicodeString(strip=True, min=1, not_empty=False) | |
352 | captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
352 | captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False) | |
353 | captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False) |
|
353 | captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False) | |
354 |
|
354 | |||
355 | return _ApplicationSettingsForm |
|
355 | return _ApplicationSettingsForm | |
356 |
|
356 | |||
357 |
|
357 | |||
358 | def ApplicationVisualisationForm(): |
|
358 | def ApplicationVisualisationForm(): | |
359 | class _ApplicationVisualisationForm(formencode.Schema): |
|
359 | class _ApplicationVisualisationForm(formencode.Schema): | |
360 | allow_extra_fields = True |
|
360 | allow_extra_fields = True | |
361 | filter_extra_fields = False |
|
361 | filter_extra_fields = False | |
362 | show_public_icon = v.StringBoolean(if_missing=False) |
|
362 | show_public_icon = v.StringBoolean(if_missing=False) | |
363 | show_private_icon = v.StringBoolean(if_missing=False) |
|
363 | show_private_icon = v.StringBoolean(if_missing=False) | |
364 | stylify_metalabels = v.StringBoolean(if_missing=False) |
|
364 | stylify_metalabels = v.StringBoolean(if_missing=False) | |
365 |
|
365 | |||
366 | repository_fields = v.StringBoolean(if_missing=False) |
|
366 | repository_fields = v.StringBoolean(if_missing=False) | |
367 | lightweight_journal = v.StringBoolean(if_missing=False) |
|
367 | lightweight_journal = v.StringBoolean(if_missing=False) | |
368 | dashboard_items = v.Int(min=5, not_empty=True) |
|
368 | dashboard_items = v.Int(min=5, not_empty=True) | |
369 | admin_grid_items = v.Int(min=5, not_empty=True) |
|
369 | admin_grid_items = v.Int(min=5, not_empty=True) | |
370 | show_version = v.StringBoolean(if_missing=False) |
|
370 | show_version = v.StringBoolean(if_missing=False) | |
371 | use_gravatar = v.StringBoolean(if_missing=False) |
|
371 | use_gravatar = v.StringBoolean(if_missing=False) | |
372 | gravatar_url = v.UnicodeString(min=3) |
|
372 | gravatar_url = v.UnicodeString(min=3) | |
373 | clone_uri_tmpl = v.UnicodeString(min=3) |
|
373 | clone_uri_tmpl = v.UnicodeString(min=3) | |
374 | clone_ssh_tmpl = v.UnicodeString() |
|
374 | clone_ssh_tmpl = v.UnicodeString() | |
375 |
|
375 | |||
376 | return _ApplicationVisualisationForm |
|
376 | return _ApplicationVisualisationForm | |
377 |
|
377 | |||
378 |
|
378 | |||
379 | def ApplicationUiSettingsForm(): |
|
379 | def ApplicationUiSettingsForm(): | |
380 | class _ApplicationUiSettingsForm(formencode.Schema): |
|
380 | class _ApplicationUiSettingsForm(formencode.Schema): | |
381 | allow_extra_fields = True |
|
381 | allow_extra_fields = True | |
382 | filter_extra_fields = False |
|
382 | filter_extra_fields = False | |
383 | paths_root_path = All( |
|
383 | paths_root_path = All( | |
384 | v.ValidPath(), |
|
384 | v.ValidPath(), | |
385 | v.UnicodeString(strip=True, min=1, not_empty=True) |
|
385 | v.UnicodeString(strip=True, min=1, not_empty=True) | |
386 | ) |
|
386 | ) | |
387 | hooks_changegroup_update = v.StringBoolean(if_missing=False) |
|
387 | hooks_changegroup_update = v.StringBoolean(if_missing=False) | |
388 | hooks_changegroup_repo_size = v.StringBoolean(if_missing=False) |
|
388 | hooks_changegroup_repo_size = v.StringBoolean(if_missing=False) | |
389 |
|
389 | |||
390 | extensions_largefiles = v.StringBoolean(if_missing=False) |
|
390 | extensions_largefiles = v.StringBoolean(if_missing=False) | |
391 | extensions_hggit = v.StringBoolean(if_missing=False) |
|
391 | extensions_hggit = v.StringBoolean(if_missing=False) | |
392 |
|
392 | |||
393 | return _ApplicationUiSettingsForm |
|
393 | return _ApplicationUiSettingsForm | |
394 |
|
394 | |||
395 |
|
395 | |||
396 | def DefaultPermissionsForm(repo_perms_choices, group_perms_choices, |
|
396 | def DefaultPermissionsForm(repo_perms_choices, group_perms_choices, | |
397 | user_group_perms_choices, create_choices, |
|
397 | user_group_perms_choices, create_choices, | |
398 | user_group_create_choices, fork_choices, |
|
398 | user_group_create_choices, fork_choices, | |
399 | register_choices, extern_activate_choices): |
|
399 | register_choices, extern_activate_choices): | |
400 | class _DefaultPermissionsForm(formencode.Schema): |
|
400 | class _DefaultPermissionsForm(formencode.Schema): | |
401 | allow_extra_fields = True |
|
401 | allow_extra_fields = True | |
402 | filter_extra_fields = True |
|
402 | filter_extra_fields = True | |
403 | overwrite_default_repo = v.StringBoolean(if_missing=False) |
|
403 | overwrite_default_repo = v.StringBoolean(if_missing=False) | |
404 | overwrite_default_group = v.StringBoolean(if_missing=False) |
|
404 | overwrite_default_group = v.StringBoolean(if_missing=False) | |
405 | overwrite_default_user_group = v.StringBoolean(if_missing=False) |
|
405 | overwrite_default_user_group = v.StringBoolean(if_missing=False) | |
406 | anonymous = v.StringBoolean(if_missing=False) |
|
406 | anonymous = v.StringBoolean(if_missing=False) | |
407 | default_repo_perm = v.OneOf(repo_perms_choices) |
|
407 | default_repo_perm = v.OneOf(repo_perms_choices) | |
408 | default_group_perm = v.OneOf(group_perms_choices) |
|
408 | default_group_perm = v.OneOf(group_perms_choices) | |
409 | default_user_group_perm = v.OneOf(user_group_perms_choices) |
|
409 | default_user_group_perm = v.OneOf(user_group_perms_choices) | |
410 |
|
410 | |||
411 | default_repo_create = v.OneOf(create_choices) |
|
411 | default_repo_create = v.OneOf(create_choices) | |
412 | default_user_group_create = v.OneOf(user_group_create_choices) |
|
412 | default_user_group_create = v.OneOf(user_group_create_choices) | |
413 | default_fork = v.OneOf(fork_choices) |
|
413 | default_fork = v.OneOf(fork_choices) | |
414 |
|
414 | |||
415 | default_register = v.OneOf(register_choices) |
|
415 | default_register = v.OneOf(register_choices) | |
416 | default_extern_activate = v.OneOf(extern_activate_choices) |
|
416 | default_extern_activate = v.OneOf(extern_activate_choices) | |
417 | return _DefaultPermissionsForm |
|
417 | return _DefaultPermissionsForm | |
418 |
|
418 | |||
419 |
|
419 | |||
420 | def CustomDefaultPermissionsForm(): |
|
420 | def CustomDefaultPermissionsForm(): | |
421 | class _CustomDefaultPermissionsForm(formencode.Schema): |
|
421 | class _CustomDefaultPermissionsForm(formencode.Schema): | |
422 | filter_extra_fields = True |
|
422 | filter_extra_fields = True | |
423 | allow_extra_fields = True |
|
423 | allow_extra_fields = True | |
424 |
|
424 | |||
425 | create_repo_perm = v.StringBoolean(if_missing=False) |
|
425 | create_repo_perm = v.StringBoolean(if_missing=False) | |
426 | create_user_group_perm = v.StringBoolean(if_missing=False) |
|
426 | create_user_group_perm = v.StringBoolean(if_missing=False) | |
427 | #create_repo_group_perm Impl. later |
|
427 | #create_repo_group_perm Impl. later | |
428 |
|
428 | |||
429 | fork_repo_perm = v.StringBoolean(if_missing=False) |
|
429 | fork_repo_perm = v.StringBoolean(if_missing=False) | |
430 |
|
430 | |||
431 | return _CustomDefaultPermissionsForm |
|
431 | return _CustomDefaultPermissionsForm | |
432 |
|
432 | |||
433 |
|
433 | |||
434 | def DefaultsForm(edit=False, old_data=None, supported_backends=BACKENDS): |
|
434 | def DefaultsForm(edit=False, old_data=None, supported_backends=kallithea.BACKENDS): | |
435 | class _DefaultsForm(formencode.Schema): |
|
435 | class _DefaultsForm(formencode.Schema): | |
436 | allow_extra_fields = True |
|
436 | allow_extra_fields = True | |
437 | filter_extra_fields = True |
|
437 | filter_extra_fields = True | |
438 | default_repo_type = v.OneOf(supported_backends) |
|
438 | default_repo_type = v.OneOf(supported_backends) | |
439 | default_repo_private = v.StringBoolean(if_missing=False) |
|
439 | default_repo_private = v.StringBoolean(if_missing=False) | |
440 | default_repo_enable_statistics = v.StringBoolean(if_missing=False) |
|
440 | default_repo_enable_statistics = v.StringBoolean(if_missing=False) | |
441 | default_repo_enable_downloads = v.StringBoolean(if_missing=False) |
|
441 | default_repo_enable_downloads = v.StringBoolean(if_missing=False) | |
442 |
|
442 | |||
443 | return _DefaultsForm |
|
443 | return _DefaultsForm | |
444 |
|
444 | |||
445 |
|
445 | |||
446 | def AuthSettingsForm(current_active_modules): |
|
446 | def AuthSettingsForm(current_active_modules): | |
447 | class _AuthSettingsForm(formencode.Schema): |
|
447 | class _AuthSettingsForm(formencode.Schema): | |
448 | allow_extra_fields = True |
|
448 | allow_extra_fields = True | |
449 | filter_extra_fields = True |
|
449 | filter_extra_fields = True | |
450 | auth_plugins = All(v.ValidAuthPlugins(), |
|
450 | auth_plugins = All(v.ValidAuthPlugins(), | |
451 | v.UniqueListFromString()(not_empty=True)) |
|
451 | v.UniqueListFromString()(not_empty=True)) | |
452 |
|
452 | |||
453 | def __init__(self, *args, **kwargs): |
|
453 | def __init__(self, *args, **kwargs): | |
454 | # The auth plugins tell us what form validators they use |
|
454 | # The auth plugins tell us what form validators they use | |
455 | if current_active_modules: |
|
455 | if current_active_modules: | |
456 | import kallithea.lib.auth_modules |
|
456 | import kallithea.lib.auth_modules | |
457 | from kallithea.lib.auth_modules import LazyFormencode |
|
457 | from kallithea.lib.auth_modules import LazyFormencode | |
458 | for module in current_active_modules: |
|
458 | for module in current_active_modules: | |
459 | plugin = kallithea.lib.auth_modules.loadplugin(module) |
|
459 | plugin = kallithea.lib.auth_modules.loadplugin(module) | |
460 | plugin_name = plugin.name |
|
460 | plugin_name = plugin.name | |
461 | for sv in plugin.plugin_settings(): |
|
461 | for sv in plugin.plugin_settings(): | |
462 | newk = "auth_%s_%s" % (plugin_name, sv["name"]) |
|
462 | newk = "auth_%s_%s" % (plugin_name, sv["name"]) | |
463 | # can be a LazyFormencode object from plugin settings |
|
463 | # can be a LazyFormencode object from plugin settings | |
464 | validator = sv["validator"] |
|
464 | validator = sv["validator"] | |
465 | if isinstance(validator, LazyFormencode): |
|
465 | if isinstance(validator, LazyFormencode): | |
466 | validator = validator() |
|
466 | validator = validator() | |
467 | # init all lazy validators from formencode.All |
|
467 | # init all lazy validators from formencode.All | |
468 | if isinstance(validator, All): |
|
468 | if isinstance(validator, All): | |
469 | init_validators = [] |
|
469 | init_validators = [] | |
470 | for validator in validator.validators: |
|
470 | for validator in validator.validators: | |
471 | if isinstance(validator, LazyFormencode): |
|
471 | if isinstance(validator, LazyFormencode): | |
472 | validator = validator() |
|
472 | validator = validator() | |
473 | init_validators.append(validator) |
|
473 | init_validators.append(validator) | |
474 | validator.validators = init_validators |
|
474 | validator.validators = init_validators | |
475 |
|
475 | |||
476 | self.add_field(newk, validator) |
|
476 | self.add_field(newk, validator) | |
477 | formencode.Schema.__init__(self, *args, **kwargs) |
|
477 | formencode.Schema.__init__(self, *args, **kwargs) | |
478 |
|
478 | |||
479 | return _AuthSettingsForm |
|
479 | return _AuthSettingsForm | |
480 |
|
480 | |||
481 |
|
481 | |||
482 | def LdapSettingsForm(tls_reqcert_choices, search_scope_choices, |
|
482 | def LdapSettingsForm(tls_reqcert_choices, search_scope_choices, | |
483 | tls_kind_choices): |
|
483 | tls_kind_choices): | |
484 | class _LdapSettingsForm(formencode.Schema): |
|
484 | class _LdapSettingsForm(formencode.Schema): | |
485 | allow_extra_fields = True |
|
485 | allow_extra_fields = True | |
486 | filter_extra_fields = True |
|
486 | filter_extra_fields = True | |
487 | #pre_validators = [LdapLibValidator] |
|
487 | #pre_validators = [LdapLibValidator] | |
488 | ldap_active = v.StringBoolean(if_missing=False) |
|
488 | ldap_active = v.StringBoolean(if_missing=False) | |
489 | ldap_host = v.UnicodeString(strip=True,) |
|
489 | ldap_host = v.UnicodeString(strip=True,) | |
490 | ldap_port = v.Number(strip=True,) |
|
490 | ldap_port = v.Number(strip=True,) | |
491 | ldap_tls_kind = v.OneOf(tls_kind_choices) |
|
491 | ldap_tls_kind = v.OneOf(tls_kind_choices) | |
492 | ldap_tls_reqcert = v.OneOf(tls_reqcert_choices) |
|
492 | ldap_tls_reqcert = v.OneOf(tls_reqcert_choices) | |
493 | ldap_dn_user = v.UnicodeString(strip=True,) |
|
493 | ldap_dn_user = v.UnicodeString(strip=True,) | |
494 | ldap_dn_pass = v.UnicodeString(strip=True,) |
|
494 | ldap_dn_pass = v.UnicodeString(strip=True,) | |
495 | ldap_base_dn = v.UnicodeString(strip=True,) |
|
495 | ldap_base_dn = v.UnicodeString(strip=True,) | |
496 | ldap_filter = v.UnicodeString(strip=True,) |
|
496 | ldap_filter = v.UnicodeString(strip=True,) | |
497 | ldap_search_scope = v.OneOf(search_scope_choices) |
|
497 | ldap_search_scope = v.OneOf(search_scope_choices) | |
498 | ldap_attr_login = v.AttrLoginValidator()(not_empty=True) |
|
498 | ldap_attr_login = v.AttrLoginValidator()(not_empty=True) | |
499 | ldap_attr_firstname = v.UnicodeString(strip=True,) |
|
499 | ldap_attr_firstname = v.UnicodeString(strip=True,) | |
500 | ldap_attr_lastname = v.UnicodeString(strip=True,) |
|
500 | ldap_attr_lastname = v.UnicodeString(strip=True,) | |
501 | ldap_attr_email = v.UnicodeString(strip=True,) |
|
501 | ldap_attr_email = v.UnicodeString(strip=True,) | |
502 |
|
502 | |||
503 | return _LdapSettingsForm |
|
503 | return _LdapSettingsForm | |
504 |
|
504 | |||
505 |
|
505 | |||
506 | def UserExtraEmailForm(): |
|
506 | def UserExtraEmailForm(): | |
507 | class _UserExtraEmailForm(formencode.Schema): |
|
507 | class _UserExtraEmailForm(formencode.Schema): | |
508 | email = All(v.UniqSystemEmail(), v.Email(not_empty=True)) |
|
508 | email = All(v.UniqSystemEmail(), v.Email(not_empty=True)) | |
509 | return _UserExtraEmailForm |
|
509 | return _UserExtraEmailForm | |
510 |
|
510 | |||
511 |
|
511 | |||
512 | def UserExtraIpForm(): |
|
512 | def UserExtraIpForm(): | |
513 | class _UserExtraIpForm(formencode.Schema): |
|
513 | class _UserExtraIpForm(formencode.Schema): | |
514 | ip = v.ValidIp()(not_empty=True) |
|
514 | ip = v.ValidIp()(not_empty=True) | |
515 | return _UserExtraIpForm |
|
515 | return _UserExtraIpForm | |
516 |
|
516 | |||
517 |
|
517 | |||
518 | def PullRequestForm(repo_id): |
|
518 | def PullRequestForm(repo_id): | |
519 | class _PullRequestForm(formencode.Schema): |
|
519 | class _PullRequestForm(formencode.Schema): | |
520 | allow_extra_fields = True |
|
520 | allow_extra_fields = True | |
521 | filter_extra_fields = True |
|
521 | filter_extra_fields = True | |
522 |
|
522 | |||
523 | org_repo = v.UnicodeString(strip=True, required=True) |
|
523 | org_repo = v.UnicodeString(strip=True, required=True) | |
524 | org_ref = v.UnicodeString(strip=True, required=True) |
|
524 | org_ref = v.UnicodeString(strip=True, required=True) | |
525 | other_repo = v.UnicodeString(strip=True, required=True) |
|
525 | other_repo = v.UnicodeString(strip=True, required=True) | |
526 | other_ref = v.UnicodeString(strip=True, required=True) |
|
526 | other_ref = v.UnicodeString(strip=True, required=True) | |
527 |
|
527 | |||
528 | pullrequest_title = v.UnicodeString(strip=True, required=True) |
|
528 | pullrequest_title = v.UnicodeString(strip=True, required=True) | |
529 | pullrequest_desc = v.UnicodeString(strip=True, required=False) |
|
529 | pullrequest_desc = v.UnicodeString(strip=True, required=False) | |
530 |
|
530 | |||
531 | return _PullRequestForm |
|
531 | return _PullRequestForm | |
532 |
|
532 | |||
533 |
|
533 | |||
534 | def PullRequestPostForm(): |
|
534 | def PullRequestPostForm(): | |
535 | class _PullRequestPostForm(formencode.Schema): |
|
535 | class _PullRequestPostForm(formencode.Schema): | |
536 | allow_extra_fields = True |
|
536 | allow_extra_fields = True | |
537 | filter_extra_fields = True |
|
537 | filter_extra_fields = True | |
538 |
|
538 | |||
539 | pullrequest_title = v.UnicodeString(strip=True, required=True) |
|
539 | pullrequest_title = v.UnicodeString(strip=True, required=True) | |
540 | pullrequest_desc = v.UnicodeString(strip=True, required=False) |
|
540 | pullrequest_desc = v.UnicodeString(strip=True, required=False) | |
541 | org_review_members = v.Set() |
|
541 | org_review_members = v.Set() | |
542 | review_members = v.Set() |
|
542 | review_members = v.Set() | |
543 | updaterev = v.UnicodeString(strip=True, required=False, if_missing=None) |
|
543 | updaterev = v.UnicodeString(strip=True, required=False, if_missing=None) | |
544 | owner = All(v.UnicodeString(strip=True, required=True), |
|
544 | owner = All(v.UnicodeString(strip=True, required=True), | |
545 | v.ValidRepoUser()) |
|
545 | v.ValidRepoUser()) | |
546 |
|
546 | |||
547 | return _PullRequestPostForm |
|
547 | return _PullRequestPostForm | |
548 |
|
548 | |||
549 |
|
549 | |||
550 | def GistForm(lifetime_options): |
|
550 | def GistForm(lifetime_options): | |
551 | class _GistForm(formencode.Schema): |
|
551 | class _GistForm(formencode.Schema): | |
552 | allow_extra_fields = True |
|
552 | allow_extra_fields = True | |
553 | filter_extra_fields = True |
|
553 | filter_extra_fields = True | |
554 |
|
554 | |||
555 | filename = All(v.BasePath()(), |
|
555 | filename = All(v.BasePath()(), | |
556 | v.UnicodeString(strip=True, required=False)) |
|
556 | v.UnicodeString(strip=True, required=False)) | |
557 | description = v.UnicodeString(required=False, if_missing='') |
|
557 | description = v.UnicodeString(required=False, if_missing='') | |
558 | lifetime = v.OneOf(lifetime_options) |
|
558 | lifetime = v.OneOf(lifetime_options) | |
559 | mimetype = v.UnicodeString(required=False, if_missing=None) |
|
559 | mimetype = v.UnicodeString(required=False, if_missing=None) | |
560 | content = v.UnicodeString(required=True, not_empty=True) |
|
560 | content = v.UnicodeString(required=True, not_empty=True) | |
561 | public = v.UnicodeString(required=False, if_missing='') |
|
561 | public = v.UnicodeString(required=False, if_missing='') | |
562 | private = v.UnicodeString(required=False, if_missing='') |
|
562 | private = v.UnicodeString(required=False, if_missing='') | |
563 |
|
563 | |||
564 | return _GistForm |
|
564 | return _GistForm |
@@ -1,762 +1,761 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.model.scm |
|
15 | kallithea.model.scm | |
16 | ~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Scm model for Kallithea |
|
18 | Scm model for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Apr 9, 2010 |
|
22 | :created_on: Apr 9, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | import logging |
|
28 | import logging | |
29 | import os |
|
29 | import os | |
30 | import posixpath |
|
30 | import posixpath | |
31 | import re |
|
31 | import re | |
32 | import sys |
|
32 | import sys | |
33 | import traceback |
|
33 | import traceback | |
34 |
|
34 | |||
35 | import pkg_resources |
|
35 | import pkg_resources | |
36 | from tg.i18n import ugettext as _ |
|
36 | from tg.i18n import ugettext as _ | |
37 |
|
37 | |||
38 | import kallithea |
|
38 | import kallithea | |
39 | from kallithea import BACKENDS |
|
|||
40 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel, HasUserGroupPermissionLevel |
|
39 | from kallithea.lib.auth import HasPermissionAny, HasRepoGroupPermissionLevel, HasRepoPermissionLevel, HasUserGroupPermissionLevel | |
41 | from kallithea.lib.exceptions import IMCCommitError, NonRelativePathError |
|
40 | from kallithea.lib.exceptions import IMCCommitError, NonRelativePathError | |
42 | from kallithea.lib.hooks import process_pushed_raw_ids |
|
41 | from kallithea.lib.hooks import process_pushed_raw_ids | |
43 | from kallithea.lib.utils import action_logger, get_filesystem_repos, make_ui |
|
42 | from kallithea.lib.utils import action_logger, get_filesystem_repos, make_ui | |
44 | from kallithea.lib.utils2 import safe_bytes, set_hook_environment |
|
43 | from kallithea.lib.utils2 import safe_bytes, set_hook_environment | |
45 | from kallithea.lib.vcs import get_backend |
|
44 | from kallithea.lib.vcs import get_backend | |
46 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
45 | from kallithea.lib.vcs.backends.base import EmptyChangeset | |
47 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
46 | from kallithea.lib.vcs.exceptions import RepositoryError | |
48 | from kallithea.lib.vcs.nodes import FileNode |
|
47 | from kallithea.lib.vcs.nodes import FileNode | |
49 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
48 | from kallithea.lib.vcs.utils.lazy import LazyProperty | |
50 | from kallithea.model.db import PullRequest, RepoGroup, Repository, Session, Ui, User, UserFollowing, UserLog |
|
49 | from kallithea.model.db import PullRequest, RepoGroup, Repository, Session, Ui, User, UserFollowing, UserLog | |
51 |
|
50 | |||
52 |
|
51 | |||
53 | log = logging.getLogger(__name__) |
|
52 | log = logging.getLogger(__name__) | |
54 |
|
53 | |||
55 |
|
54 | |||
56 | class UserTemp(object): |
|
55 | class UserTemp(object): | |
57 | def __init__(self, user_id): |
|
56 | def __init__(self, user_id): | |
58 | self.user_id = user_id |
|
57 | self.user_id = user_id | |
59 |
|
58 | |||
60 | def __repr__(self): |
|
59 | def __repr__(self): | |
61 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) |
|
60 | return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id) | |
62 |
|
61 | |||
63 |
|
62 | |||
64 | class RepoTemp(object): |
|
63 | class RepoTemp(object): | |
65 | def __init__(self, repo_id): |
|
64 | def __init__(self, repo_id): | |
66 | self.repo_id = repo_id |
|
65 | self.repo_id = repo_id | |
67 |
|
66 | |||
68 | def __repr__(self): |
|
67 | def __repr__(self): | |
69 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) |
|
68 | return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id) | |
70 |
|
69 | |||
71 |
|
70 | |||
72 | class _PermCheckIterator(object): |
|
71 | class _PermCheckIterator(object): | |
73 | def __init__(self, obj_list, obj_attr, perm_set, perm_checker, extra_kwargs=None): |
|
72 | def __init__(self, obj_list, obj_attr, perm_set, perm_checker, extra_kwargs=None): | |
74 | """ |
|
73 | """ | |
75 | Creates iterator from given list of objects, additionally |
|
74 | Creates iterator from given list of objects, additionally | |
76 | checking permission for them from perm_set var |
|
75 | checking permission for them from perm_set var | |
77 |
|
76 | |||
78 | :param obj_list: list of db objects |
|
77 | :param obj_list: list of db objects | |
79 | :param obj_attr: attribute of object to pass into perm_checker |
|
78 | :param obj_attr: attribute of object to pass into perm_checker | |
80 | :param perm_set: list of permissions to check |
|
79 | :param perm_set: list of permissions to check | |
81 | :param perm_checker: callable to check permissions against |
|
80 | :param perm_checker: callable to check permissions against | |
82 | """ |
|
81 | """ | |
83 | self.obj_list = obj_list |
|
82 | self.obj_list = obj_list | |
84 | self.obj_attr = obj_attr |
|
83 | self.obj_attr = obj_attr | |
85 | self.perm_set = perm_set |
|
84 | self.perm_set = perm_set | |
86 | self.perm_checker = perm_checker |
|
85 | self.perm_checker = perm_checker | |
87 | self.extra_kwargs = extra_kwargs or {} |
|
86 | self.extra_kwargs = extra_kwargs or {} | |
88 |
|
87 | |||
89 | def __len__(self): |
|
88 | def __len__(self): | |
90 | return len(self.obj_list) |
|
89 | return len(self.obj_list) | |
91 |
|
90 | |||
92 | def __repr__(self): |
|
91 | def __repr__(self): | |
93 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) |
|
92 | return '<%s (%s)>' % (self.__class__.__name__, self.__len__()) | |
94 |
|
93 | |||
95 | def __iter__(self): |
|
94 | def __iter__(self): | |
96 | for db_obj in self.obj_list: |
|
95 | for db_obj in self.obj_list: | |
97 | # check permission at this level |
|
96 | # check permission at this level | |
98 | name = getattr(db_obj, self.obj_attr, None) |
|
97 | name = getattr(db_obj, self.obj_attr, None) | |
99 | if not self.perm_checker(*self.perm_set)( |
|
98 | if not self.perm_checker(*self.perm_set)( | |
100 | name, self.__class__.__name__, **self.extra_kwargs): |
|
99 | name, self.__class__.__name__, **self.extra_kwargs): | |
101 | continue |
|
100 | continue | |
102 |
|
101 | |||
103 | yield db_obj |
|
102 | yield db_obj | |
104 |
|
103 | |||
105 |
|
104 | |||
106 | class RepoList(_PermCheckIterator): |
|
105 | class RepoList(_PermCheckIterator): | |
107 |
|
106 | |||
108 | def __init__(self, db_repo_list, perm_level, extra_kwargs=None): |
|
107 | def __init__(self, db_repo_list, perm_level, extra_kwargs=None): | |
109 | super(RepoList, self).__init__(obj_list=db_repo_list, |
|
108 | super(RepoList, self).__init__(obj_list=db_repo_list, | |
110 | obj_attr='repo_name', perm_set=[perm_level], |
|
109 | obj_attr='repo_name', perm_set=[perm_level], | |
111 | perm_checker=HasRepoPermissionLevel, |
|
110 | perm_checker=HasRepoPermissionLevel, | |
112 | extra_kwargs=extra_kwargs) |
|
111 | extra_kwargs=extra_kwargs) | |
113 |
|
112 | |||
114 |
|
113 | |||
115 | class RepoGroupList(_PermCheckIterator): |
|
114 | class RepoGroupList(_PermCheckIterator): | |
116 |
|
115 | |||
117 | def __init__(self, db_repo_group_list, perm_level, extra_kwargs=None): |
|
116 | def __init__(self, db_repo_group_list, perm_level, extra_kwargs=None): | |
118 | super(RepoGroupList, self).__init__(obj_list=db_repo_group_list, |
|
117 | super(RepoGroupList, self).__init__(obj_list=db_repo_group_list, | |
119 | obj_attr='group_name', perm_set=[perm_level], |
|
118 | obj_attr='group_name', perm_set=[perm_level], | |
120 | perm_checker=HasRepoGroupPermissionLevel, |
|
119 | perm_checker=HasRepoGroupPermissionLevel, | |
121 | extra_kwargs=extra_kwargs) |
|
120 | extra_kwargs=extra_kwargs) | |
122 |
|
121 | |||
123 |
|
122 | |||
124 | class UserGroupList(_PermCheckIterator): |
|
123 | class UserGroupList(_PermCheckIterator): | |
125 |
|
124 | |||
126 | def __init__(self, db_user_group_list, perm_level, extra_kwargs=None): |
|
125 | def __init__(self, db_user_group_list, perm_level, extra_kwargs=None): | |
127 | super(UserGroupList, self).__init__(obj_list=db_user_group_list, |
|
126 | super(UserGroupList, self).__init__(obj_list=db_user_group_list, | |
128 | obj_attr='users_group_name', perm_set=[perm_level], |
|
127 | obj_attr='users_group_name', perm_set=[perm_level], | |
129 | perm_checker=HasUserGroupPermissionLevel, |
|
128 | perm_checker=HasUserGroupPermissionLevel, | |
130 | extra_kwargs=extra_kwargs) |
|
129 | extra_kwargs=extra_kwargs) | |
131 |
|
130 | |||
132 |
|
131 | |||
133 | class ScmModel(object): |
|
132 | class ScmModel(object): | |
134 | """ |
|
133 | """ | |
135 | Generic Scm Model |
|
134 | Generic Scm Model | |
136 | """ |
|
135 | """ | |
137 |
|
136 | |||
138 | def __get_repo(self, instance): |
|
137 | def __get_repo(self, instance): | |
139 | cls = Repository |
|
138 | cls = Repository | |
140 | if isinstance(instance, cls): |
|
139 | if isinstance(instance, cls): | |
141 | return instance |
|
140 | return instance | |
142 | elif isinstance(instance, int): |
|
141 | elif isinstance(instance, int): | |
143 | return cls.get(instance) |
|
142 | return cls.get(instance) | |
144 | elif isinstance(instance, str): |
|
143 | elif isinstance(instance, str): | |
145 | if instance.isdigit(): |
|
144 | if instance.isdigit(): | |
146 | return cls.get(int(instance)) |
|
145 | return cls.get(int(instance)) | |
147 | return cls.get_by_repo_name(instance) |
|
146 | return cls.get_by_repo_name(instance) | |
148 | raise Exception('given object must be int, basestr or Instance' |
|
147 | raise Exception('given object must be int, basestr or Instance' | |
149 | ' of %s got %s' % (type(cls), type(instance))) |
|
148 | ' of %s got %s' % (type(cls), type(instance))) | |
150 |
|
149 | |||
151 | @LazyProperty |
|
150 | @LazyProperty | |
152 | def repos_path(self): |
|
151 | def repos_path(self): | |
153 | """ |
|
152 | """ | |
154 | Gets the repositories root path from database |
|
153 | Gets the repositories root path from database | |
155 | """ |
|
154 | """ | |
156 |
|
155 | |||
157 | q = Ui.query().filter(Ui.ui_key == '/').one() |
|
156 | q = Ui.query().filter(Ui.ui_key == '/').one() | |
158 |
|
157 | |||
159 | return q.ui_value |
|
158 | return q.ui_value | |
160 |
|
159 | |||
161 | def repo_scan(self, repos_path=None): |
|
160 | def repo_scan(self, repos_path=None): | |
162 | """ |
|
161 | """ | |
163 | Listing of repositories in given path. This path should not be a |
|
162 | Listing of repositories in given path. This path should not be a | |
164 | repository itself. Return a dictionary of repository objects mapping to |
|
163 | repository itself. Return a dictionary of repository objects mapping to | |
165 | vcs instances. |
|
164 | vcs instances. | |
166 |
|
165 | |||
167 | :param repos_path: path to directory containing repositories |
|
166 | :param repos_path: path to directory containing repositories | |
168 | """ |
|
167 | """ | |
169 |
|
168 | |||
170 | if repos_path is None: |
|
169 | if repos_path is None: | |
171 | repos_path = self.repos_path |
|
170 | repos_path = self.repos_path | |
172 |
|
171 | |||
173 | log.info('scanning for repositories in %s', repos_path) |
|
172 | log.info('scanning for repositories in %s', repos_path) | |
174 |
|
173 | |||
175 | baseui = make_ui() |
|
174 | baseui = make_ui() | |
176 | repos = {} |
|
175 | repos = {} | |
177 |
|
176 | |||
178 | for name, path in get_filesystem_repos(repos_path): |
|
177 | for name, path in get_filesystem_repos(repos_path): | |
179 | # name need to be decomposed and put back together using the / |
|
178 | # name need to be decomposed and put back together using the / | |
180 | # since this is internal storage separator for kallithea |
|
179 | # since this is internal storage separator for kallithea | |
181 | name = Repository.normalize_repo_name(name) |
|
180 | name = Repository.normalize_repo_name(name) | |
182 |
|
181 | |||
183 | try: |
|
182 | try: | |
184 | if name in repos: |
|
183 | if name in repos: | |
185 | raise RepositoryError('Duplicate repository name %s ' |
|
184 | raise RepositoryError('Duplicate repository name %s ' | |
186 | 'found in %s' % (name, path)) |
|
185 | 'found in %s' % (name, path)) | |
187 | else: |
|
186 | else: | |
188 |
|
187 | |||
189 | klass = get_backend(path[0]) |
|
188 | klass = get_backend(path[0]) | |
190 |
|
189 | |||
191 | if path[0] == 'hg' and path[0] in BACKENDS: |
|
190 | if path[0] == 'hg' and path[0] in kallithea.BACKENDS: | |
192 | repos[name] = klass(path[1], baseui=baseui) |
|
191 | repos[name] = klass(path[1], baseui=baseui) | |
193 |
|
192 | |||
194 | if path[0] == 'git' and path[0] in BACKENDS: |
|
193 | if path[0] == 'git' and path[0] in kallithea.BACKENDS: | |
195 | repos[name] = klass(path[1]) |
|
194 | repos[name] = klass(path[1]) | |
196 | except OSError: |
|
195 | except OSError: | |
197 | continue |
|
196 | continue | |
198 | log.debug('found %s paths with repositories', len(repos)) |
|
197 | log.debug('found %s paths with repositories', len(repos)) | |
199 | return repos |
|
198 | return repos | |
200 |
|
199 | |||
201 | def get_repos(self, repos): |
|
200 | def get_repos(self, repos): | |
202 | """Return the repos the user has access to""" |
|
201 | """Return the repos the user has access to""" | |
203 | return RepoList(repos, perm_level='read') |
|
202 | return RepoList(repos, perm_level='read') | |
204 |
|
203 | |||
205 | def get_repo_groups(self, groups=None): |
|
204 | def get_repo_groups(self, groups=None): | |
206 | """Return the repo groups the user has access to |
|
205 | """Return the repo groups the user has access to | |
207 | If no groups are specified, use top level groups. |
|
206 | If no groups are specified, use top level groups. | |
208 | """ |
|
207 | """ | |
209 | if groups is None: |
|
208 | if groups is None: | |
210 | groups = RepoGroup.query() \ |
|
209 | groups = RepoGroup.query() \ | |
211 | .filter(RepoGroup.parent_group_id == None).all() |
|
210 | .filter(RepoGroup.parent_group_id == None).all() | |
212 | return RepoGroupList(groups, perm_level='read') |
|
211 | return RepoGroupList(groups, perm_level='read') | |
213 |
|
212 | |||
214 | def mark_for_invalidation(self, repo_name): |
|
213 | def mark_for_invalidation(self, repo_name): | |
215 | """ |
|
214 | """ | |
216 | Mark caches of this repo invalid in the database. |
|
215 | Mark caches of this repo invalid in the database. | |
217 |
|
216 | |||
218 | :param repo_name: the repo for which caches should be marked invalid |
|
217 | :param repo_name: the repo for which caches should be marked invalid | |
219 | """ |
|
218 | """ | |
220 | log.debug("Marking %s as invalidated and update cache", repo_name) |
|
219 | log.debug("Marking %s as invalidated and update cache", repo_name) | |
221 | repo = Repository.get_by_repo_name(repo_name) |
|
220 | repo = Repository.get_by_repo_name(repo_name) | |
222 | if repo is not None: |
|
221 | if repo is not None: | |
223 | repo.set_invalidate() |
|
222 | repo.set_invalidate() | |
224 | repo.update_changeset_cache() |
|
223 | repo.update_changeset_cache() | |
225 |
|
224 | |||
226 | def toggle_following_repo(self, follow_repo_id, user_id): |
|
225 | def toggle_following_repo(self, follow_repo_id, user_id): | |
227 |
|
226 | |||
228 | f = UserFollowing.query() \ |
|
227 | f = UserFollowing.query() \ | |
229 | .filter(UserFollowing.follows_repository_id == follow_repo_id) \ |
|
228 | .filter(UserFollowing.follows_repository_id == follow_repo_id) \ | |
230 | .filter(UserFollowing.user_id == user_id).scalar() |
|
229 | .filter(UserFollowing.user_id == user_id).scalar() | |
231 |
|
230 | |||
232 | if f is not None: |
|
231 | if f is not None: | |
233 | try: |
|
232 | try: | |
234 | Session().delete(f) |
|
233 | Session().delete(f) | |
235 | action_logger(UserTemp(user_id), |
|
234 | action_logger(UserTemp(user_id), | |
236 | 'stopped_following_repo', |
|
235 | 'stopped_following_repo', | |
237 | RepoTemp(follow_repo_id)) |
|
236 | RepoTemp(follow_repo_id)) | |
238 | return |
|
237 | return | |
239 | except Exception: |
|
238 | except Exception: | |
240 | log.error(traceback.format_exc()) |
|
239 | log.error(traceback.format_exc()) | |
241 | raise |
|
240 | raise | |
242 |
|
241 | |||
243 | try: |
|
242 | try: | |
244 | f = UserFollowing() |
|
243 | f = UserFollowing() | |
245 | f.user_id = user_id |
|
244 | f.user_id = user_id | |
246 | f.follows_repository_id = follow_repo_id |
|
245 | f.follows_repository_id = follow_repo_id | |
247 | Session().add(f) |
|
246 | Session().add(f) | |
248 |
|
247 | |||
249 | action_logger(UserTemp(user_id), |
|
248 | action_logger(UserTemp(user_id), | |
250 | 'started_following_repo', |
|
249 | 'started_following_repo', | |
251 | RepoTemp(follow_repo_id)) |
|
250 | RepoTemp(follow_repo_id)) | |
252 | except Exception: |
|
251 | except Exception: | |
253 | log.error(traceback.format_exc()) |
|
252 | log.error(traceback.format_exc()) | |
254 | raise |
|
253 | raise | |
255 |
|
254 | |||
256 | def toggle_following_user(self, follow_user_id, user_id): |
|
255 | def toggle_following_user(self, follow_user_id, user_id): | |
257 | f = UserFollowing.query() \ |
|
256 | f = UserFollowing.query() \ | |
258 | .filter(UserFollowing.follows_user_id == follow_user_id) \ |
|
257 | .filter(UserFollowing.follows_user_id == follow_user_id) \ | |
259 | .filter(UserFollowing.user_id == user_id).scalar() |
|
258 | .filter(UserFollowing.user_id == user_id).scalar() | |
260 |
|
259 | |||
261 | if f is not None: |
|
260 | if f is not None: | |
262 | try: |
|
261 | try: | |
263 | Session().delete(f) |
|
262 | Session().delete(f) | |
264 | return |
|
263 | return | |
265 | except Exception: |
|
264 | except Exception: | |
266 | log.error(traceback.format_exc()) |
|
265 | log.error(traceback.format_exc()) | |
267 | raise |
|
266 | raise | |
268 |
|
267 | |||
269 | try: |
|
268 | try: | |
270 | f = UserFollowing() |
|
269 | f = UserFollowing() | |
271 | f.user_id = user_id |
|
270 | f.user_id = user_id | |
272 | f.follows_user_id = follow_user_id |
|
271 | f.follows_user_id = follow_user_id | |
273 | Session().add(f) |
|
272 | Session().add(f) | |
274 | except Exception: |
|
273 | except Exception: | |
275 | log.error(traceback.format_exc()) |
|
274 | log.error(traceback.format_exc()) | |
276 | raise |
|
275 | raise | |
277 |
|
276 | |||
278 | def is_following_repo(self, repo_name, user_id): |
|
277 | def is_following_repo(self, repo_name, user_id): | |
279 | r = Repository.query() \ |
|
278 | r = Repository.query() \ | |
280 | .filter(Repository.repo_name == repo_name).scalar() |
|
279 | .filter(Repository.repo_name == repo_name).scalar() | |
281 |
|
280 | |||
282 | f = UserFollowing.query() \ |
|
281 | f = UserFollowing.query() \ | |
283 | .filter(UserFollowing.follows_repository == r) \ |
|
282 | .filter(UserFollowing.follows_repository == r) \ | |
284 | .filter(UserFollowing.user_id == user_id).scalar() |
|
283 | .filter(UserFollowing.user_id == user_id).scalar() | |
285 |
|
284 | |||
286 | return f is not None |
|
285 | return f is not None | |
287 |
|
286 | |||
288 | def is_following_user(self, username, user_id): |
|
287 | def is_following_user(self, username, user_id): | |
289 | u = User.get_by_username(username) |
|
288 | u = User.get_by_username(username) | |
290 |
|
289 | |||
291 | f = UserFollowing.query() \ |
|
290 | f = UserFollowing.query() \ | |
292 | .filter(UserFollowing.follows_user == u) \ |
|
291 | .filter(UserFollowing.follows_user == u) \ | |
293 | .filter(UserFollowing.user_id == user_id).scalar() |
|
292 | .filter(UserFollowing.user_id == user_id).scalar() | |
294 |
|
293 | |||
295 | return f is not None |
|
294 | return f is not None | |
296 |
|
295 | |||
297 | def get_followers(self, repo): |
|
296 | def get_followers(self, repo): | |
298 | repo = Repository.guess_instance(repo) |
|
297 | repo = Repository.guess_instance(repo) | |
299 |
|
298 | |||
300 | return UserFollowing.query() \ |
|
299 | return UserFollowing.query() \ | |
301 | .filter(UserFollowing.follows_repository == repo).count() |
|
300 | .filter(UserFollowing.follows_repository == repo).count() | |
302 |
|
301 | |||
303 | def get_forks(self, repo): |
|
302 | def get_forks(self, repo): | |
304 | repo = Repository.guess_instance(repo) |
|
303 | repo = Repository.guess_instance(repo) | |
305 | return Repository.query() \ |
|
304 | return Repository.query() \ | |
306 | .filter(Repository.fork == repo).count() |
|
305 | .filter(Repository.fork == repo).count() | |
307 |
|
306 | |||
308 | def get_pull_requests(self, repo): |
|
307 | def get_pull_requests(self, repo): | |
309 | repo = Repository.guess_instance(repo) |
|
308 | repo = Repository.guess_instance(repo) | |
310 | return PullRequest.query() \ |
|
309 | return PullRequest.query() \ | |
311 | .filter(PullRequest.other_repo == repo) \ |
|
310 | .filter(PullRequest.other_repo == repo) \ | |
312 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() |
|
311 | .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count() | |
313 |
|
312 | |||
314 | def mark_as_fork(self, repo, fork, user): |
|
313 | def mark_as_fork(self, repo, fork, user): | |
315 | repo = self.__get_repo(repo) |
|
314 | repo = self.__get_repo(repo) | |
316 | fork = self.__get_repo(fork) |
|
315 | fork = self.__get_repo(fork) | |
317 | if fork and repo.repo_id == fork.repo_id: |
|
316 | if fork and repo.repo_id == fork.repo_id: | |
318 | raise Exception("Cannot set repository as fork of itself") |
|
317 | raise Exception("Cannot set repository as fork of itself") | |
319 |
|
318 | |||
320 | if fork and repo.repo_type != fork.repo_type: |
|
319 | if fork and repo.repo_type != fork.repo_type: | |
321 | raise RepositoryError("Cannot set repository as fork of repository with other type") |
|
320 | raise RepositoryError("Cannot set repository as fork of repository with other type") | |
322 |
|
321 | |||
323 | repo.fork = fork |
|
322 | repo.fork = fork | |
324 | return repo |
|
323 | return repo | |
325 |
|
324 | |||
326 | def _handle_push(self, repo, username, ip_addr, action, repo_name, revisions): |
|
325 | def _handle_push(self, repo, username, ip_addr, action, repo_name, revisions): | |
327 | """ |
|
326 | """ | |
328 | Handle that the repository has changed. |
|
327 | Handle that the repository has changed. | |
329 | Adds an action log entry with the new revisions, and the head revision |
|
328 | Adds an action log entry with the new revisions, and the head revision | |
330 | cache and in-memory caches are invalidated/updated. |
|
329 | cache and in-memory caches are invalidated/updated. | |
331 |
|
330 | |||
332 | :param username: username who pushes |
|
331 | :param username: username who pushes | |
333 | :param action: push/push_local/push_remote |
|
332 | :param action: push/push_local/push_remote | |
334 | :param repo_name: name of repo |
|
333 | :param repo_name: name of repo | |
335 | :param revisions: list of revisions that we pushed |
|
334 | :param revisions: list of revisions that we pushed | |
336 | """ |
|
335 | """ | |
337 | set_hook_environment(username, ip_addr, repo_name, repo_alias=repo.alias, action=action) |
|
336 | set_hook_environment(username, ip_addr, repo_name, repo_alias=repo.alias, action=action) | |
338 | process_pushed_raw_ids(revisions) # also calls mark_for_invalidation |
|
337 | process_pushed_raw_ids(revisions) # also calls mark_for_invalidation | |
339 |
|
338 | |||
340 | def _get_IMC_module(self, scm_type): |
|
339 | def _get_IMC_module(self, scm_type): | |
341 | """ |
|
340 | """ | |
342 | Returns InMemoryCommit class based on scm_type |
|
341 | Returns InMemoryCommit class based on scm_type | |
343 |
|
342 | |||
344 | :param scm_type: |
|
343 | :param scm_type: | |
345 | """ |
|
344 | """ | |
346 | if scm_type == 'hg': |
|
345 | if scm_type == 'hg': | |
347 | from kallithea.lib.vcs.backends.hg import MercurialInMemoryChangeset |
|
346 | from kallithea.lib.vcs.backends.hg import MercurialInMemoryChangeset | |
348 | return MercurialInMemoryChangeset |
|
347 | return MercurialInMemoryChangeset | |
349 |
|
348 | |||
350 | if scm_type == 'git': |
|
349 | if scm_type == 'git': | |
351 | from kallithea.lib.vcs.backends.git import GitInMemoryChangeset |
|
350 | from kallithea.lib.vcs.backends.git import GitInMemoryChangeset | |
352 | return GitInMemoryChangeset |
|
351 | return GitInMemoryChangeset | |
353 |
|
352 | |||
354 | raise Exception('Invalid scm_type, must be one of hg,git got %s' |
|
353 | raise Exception('Invalid scm_type, must be one of hg,git got %s' | |
355 | % (scm_type,)) |
|
354 | % (scm_type,)) | |
356 |
|
355 | |||
357 | def pull_changes(self, repo, username, ip_addr, clone_uri=None): |
|
356 | def pull_changes(self, repo, username, ip_addr, clone_uri=None): | |
358 | """ |
|
357 | """ | |
359 | Pull from "clone URL" or fork origin. |
|
358 | Pull from "clone URL" or fork origin. | |
360 | """ |
|
359 | """ | |
361 | dbrepo = self.__get_repo(repo) |
|
360 | dbrepo = self.__get_repo(repo) | |
362 | if clone_uri is None: |
|
361 | if clone_uri is None: | |
363 | clone_uri = dbrepo.clone_uri or dbrepo.fork and dbrepo.fork.repo_full_path |
|
362 | clone_uri = dbrepo.clone_uri or dbrepo.fork and dbrepo.fork.repo_full_path | |
364 | if not clone_uri: |
|
363 | if not clone_uri: | |
365 | raise Exception("This repository doesn't have a clone uri") |
|
364 | raise Exception("This repository doesn't have a clone uri") | |
366 |
|
365 | |||
367 | repo = dbrepo.scm_instance |
|
366 | repo = dbrepo.scm_instance | |
368 | repo_name = dbrepo.repo_name |
|
367 | repo_name = dbrepo.repo_name | |
369 | try: |
|
368 | try: | |
370 | if repo.alias == 'git': |
|
369 | if repo.alias == 'git': | |
371 | repo.fetch(clone_uri) |
|
370 | repo.fetch(clone_uri) | |
372 | # git doesn't really have something like post-fetch action |
|
371 | # git doesn't really have something like post-fetch action | |
373 | # we fake that now. |
|
372 | # we fake that now. | |
374 | # TODO: extract fetched revisions ... somehow ... |
|
373 | # TODO: extract fetched revisions ... somehow ... | |
375 | self._handle_push(repo, |
|
374 | self._handle_push(repo, | |
376 | username=username, |
|
375 | username=username, | |
377 | ip_addr=ip_addr, |
|
376 | ip_addr=ip_addr, | |
378 | action='push_remote', |
|
377 | action='push_remote', | |
379 | repo_name=repo_name, |
|
378 | repo_name=repo_name, | |
380 | revisions=[]) |
|
379 | revisions=[]) | |
381 | else: |
|
380 | else: | |
382 | set_hook_environment(username, ip_addr, dbrepo.repo_name, |
|
381 | set_hook_environment(username, ip_addr, dbrepo.repo_name, | |
383 | repo.alias, action='push_remote') |
|
382 | repo.alias, action='push_remote') | |
384 | repo.pull(clone_uri) |
|
383 | repo.pull(clone_uri) | |
385 | except Exception: |
|
384 | except Exception: | |
386 | log.error(traceback.format_exc()) |
|
385 | log.error(traceback.format_exc()) | |
387 | raise |
|
386 | raise | |
388 |
|
387 | |||
389 | def commit_change(self, repo, repo_name, cs, user, ip_addr, author, message, |
|
388 | def commit_change(self, repo, repo_name, cs, user, ip_addr, author, message, | |
390 | content, f_path): |
|
389 | content, f_path): | |
391 | """ |
|
390 | """ | |
392 | Commit a change to a single file |
|
391 | Commit a change to a single file | |
393 |
|
392 | |||
394 | :param repo: a db_repo.scm_instance |
|
393 | :param repo: a db_repo.scm_instance | |
395 | """ |
|
394 | """ | |
396 | user = User.guess_instance(user) |
|
395 | user = User.guess_instance(user) | |
397 | IMC = self._get_IMC_module(repo.alias) |
|
396 | IMC = self._get_IMC_module(repo.alias) | |
398 | imc = IMC(repo) |
|
397 | imc = IMC(repo) | |
399 | imc.change(FileNode(f_path, content, mode=cs.get_file_mode(f_path))) |
|
398 | imc.change(FileNode(f_path, content, mode=cs.get_file_mode(f_path))) | |
400 | try: |
|
399 | try: | |
401 | tip = imc.commit(message=message, author=author, |
|
400 | tip = imc.commit(message=message, author=author, | |
402 | parents=[cs], branch=cs.branch) |
|
401 | parents=[cs], branch=cs.branch) | |
403 | except Exception as e: |
|
402 | except Exception as e: | |
404 | log.error(traceback.format_exc()) |
|
403 | log.error(traceback.format_exc()) | |
405 | # clear caches - we also want a fresh object if commit fails |
|
404 | # clear caches - we also want a fresh object if commit fails | |
406 | self.mark_for_invalidation(repo_name) |
|
405 | self.mark_for_invalidation(repo_name) | |
407 | raise IMCCommitError(str(e)) |
|
406 | raise IMCCommitError(str(e)) | |
408 | self._handle_push(repo, |
|
407 | self._handle_push(repo, | |
409 | username=user.username, |
|
408 | username=user.username, | |
410 | ip_addr=ip_addr, |
|
409 | ip_addr=ip_addr, | |
411 | action='push_local', |
|
410 | action='push_local', | |
412 | repo_name=repo_name, |
|
411 | repo_name=repo_name, | |
413 | revisions=[tip.raw_id]) |
|
412 | revisions=[tip.raw_id]) | |
414 | return tip |
|
413 | return tip | |
415 |
|
414 | |||
416 | def _sanitize_path(self, f_path): |
|
415 | def _sanitize_path(self, f_path): | |
417 | if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path: |
|
416 | if f_path.startswith('/') or f_path.startswith('.') or '../' in f_path: | |
418 | raise NonRelativePathError('%s is not an relative path' % f_path) |
|
417 | raise NonRelativePathError('%s is not an relative path' % f_path) | |
419 | if f_path: |
|
418 | if f_path: | |
420 | f_path = posixpath.normpath(f_path) |
|
419 | f_path = posixpath.normpath(f_path) | |
421 | return f_path |
|
420 | return f_path | |
422 |
|
421 | |||
def get_nodes(self, repo_name, revision, root_path='/', flat=True):
    """
    Recursively walk root dir and return a set of all paths found.

    :param repo_name: name of repository
    :param revision: revision for which to list nodes
    :param root_path: root path to list
    :param flat: return as a list, if False returns a dict with description
    """
    dirs_found = []
    files_found = []
    try:
        db_repo = self.__get_repo(repo_name)
        changeset = db_repo.scm_instance.get_changeset(revision)
        for _topnode, dir_nodes, file_nodes in changeset.walk(root_path.lstrip('/')):
            files_found.extend(
                fn.path if flat else {"name": fn.path, "type": "file"}
                for fn in file_nodes)
            dirs_found.extend(
                dn.path if flat else {"name": dn.path, "type": "dir"}
                for dn in dir_nodes)
    except RepositoryError:
        # log full traceback, then let the caller deal with the error
        log.debug(traceback.format_exc())
        raise
    return dirs_found, files_found
451 |
|
450 | |||
def create_nodes(self, user, ip_addr, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Commits specified nodes to repo.

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param nodes: mapping {filename:{'content':content},...}
    :param parent_cs: parent changeset; can be empty, then it's an initial commit
    :param author: author of commit, can be different than committer - only for git
    :param trigger_push_hook: trigger push hooks

    :returns: new committed changeset
    """
    user = User.guess_instance(user)
    scm_instance = repo.scm_instance_no_cache()

    processed_nodes = []
    for f_path in nodes:
        content = nodes[f_path]['content']
        f_path = self._sanitize_path(f_path)
        # str/bytes pass through; anything else is assumed file-like and read
        if not isinstance(content, (str, bytes)):
            content = content.read()
        processed_nodes.append((f_path, content))

    committer = user.full_contact
    if not author:
        author = committer

    IMC = self._get_IMC_module(scm_instance.alias)
    imc = IMC(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # add multiple nodes
    for path, content in processed_nodes:
        imc.add(FileNode(path, content=content))

    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          ip_addr=ip_addr,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    else:
        # no hook fired - at least invalidate caches so the new tip is visible
        self.mark_for_invalidation(repo.repo_name)
    return tip
514 |
|
513 | |||
def update_nodes(self, user, ip_addr, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Commits updates of specified nodes to repo: add, delete or modify,
    where a rename is expressed as delete of the old name plus add of the new.

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param nodes: mapping {old_filename: {'filename': ..., 'content': ..., 'op': 'add'|'del'|'mod'}, ...}
    :param parent_cs: parent changeset; can be empty, then it's an initial commit
    :param author: author of commit, can be different than committer - only for git
    :param trigger_push_hook: trigger push hooks

    :returns: new committed changeset
    """
    user = User.guess_instance(user)
    scm_instance = repo.scm_instance_no_cache()

    committer = user.full_contact
    if not author:
        author = committer

    imc_class = self._get_IMC_module(scm_instance.alias)
    imc = imc_class(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # stage all requested node operations
    for _filename, data in nodes.items():
        # new filename, can be renamed from the old one
        filename = self._sanitize_path(data['filename'])
        old_filename = self._sanitize_path(_filename)
        content = data['content']

        filenode = FileNode(old_filename, content=content)
        op = data['op']
        if op == 'add':
            imc.add(filenode)
        elif op == 'del':
            imc.remove(filenode)
        elif op == 'mod':
            if filename != old_filename:
                # TODO: handle renames, needs vcs lib changes
                imc.remove(filenode)
                imc.add(FileNode(filename, content=content))
            else:
                imc.change(filenode)

    # commit changes
    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          ip_addr=ip_addr,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    else:
        # no hook fired - at least invalidate caches so the new tip is visible
        self.mark_for_invalidation(repo.repo_name)
    # return the new tip for consistency with create_nodes/delete_nodes
    # (backward compatible: previous version implicitly returned None)
    return tip
576 |
|
575 | |||
def delete_nodes(self, user, ip_addr, repo, message, nodes, parent_cs=None,
                 author=None, trigger_push_hook=True):
    """
    Deletes specified nodes from repo.

    :param user: Kallithea User object or user_id, the committer
    :param repo: Kallithea Repository object
    :param message: commit message
    :param nodes: mapping {filename:{'content':content},...}
    :param parent_cs: parent changeset; can be empty, then it's an initial commit
    :param author: author of commit, can be different than committer - only for git
    :param trigger_push_hook: trigger push hooks

    :returns: new committed changeset after deletion
    """
    user = User.guess_instance(user)
    scm_instance = repo.scm_instance_no_cache()

    processed_nodes = []
    for f_path in nodes:
        # content can be empty but for compatibility it allows same dicts
        # structure as add_nodes.
        # BUGFIX: read content via the ORIGINAL key before sanitizing -
        # normalization may change the path string, and the previous code
        # looked up nodes[...] with the sanitized key, raising KeyError
        # for any path that normpath rewrites (e.g. 'a//b').
        content = nodes[f_path].get('content')
        processed_nodes.append((self._sanitize_path(f_path), content))

    committer = user.full_contact
    if not author:
        author = committer

    IMC = self._get_IMC_module(scm_instance.alias)
    imc = IMC(scm_instance)

    if not parent_cs:
        parent_cs = EmptyChangeset(alias=scm_instance.alias)

    if isinstance(parent_cs, EmptyChangeset):
        # EmptyChangeset means we're editing an empty repository
        parents = None
    else:
        parents = [parent_cs]

    # remove multiple nodes
    for path, content in processed_nodes:
        imc.remove(FileNode(path, content=content))

    tip = imc.commit(message=message,
                     author=author,
                     parents=parents,
                     branch=parent_cs.branch)

    if trigger_push_hook:
        self._handle_push(scm_instance,
                          username=user.username,
                          ip_addr=ip_addr,
                          action='push_local',
                          repo_name=repo.repo_name,
                          revisions=[tip.raw_id])
    else:
        # no hook fired - at least invalidate caches so the new tip is visible
        self.mark_for_invalidation(repo.repo_name)
    return tip
639 |
|
638 | |||
def get_unread_journal(self):
    """Return the total number of journal (UserLog) entries."""
    return UserLog.query().count()
642 |
|
641 | |||
def get_repo_landing_revs(self, repo=None):
    """
    Generate select options with tags, branches and bookmarks (bookmarks
    for hg only), grouped by type.

    :param repo: repository (or its name); if None, only 'rev:tip' is offered
    :returns: tuple of (choices, grouped option labels)
    """
    hist_l = []
    choices = []
    hist_l.append(('rev:tip', _('latest tip')))
    choices.append('rev:tip')
    if repo is None:
        return choices, hist_l

    repo = self.__get_repo(repo)
    repo = repo.scm_instance

    # iterate keys only - the mapping values were never used (PERF102)
    branches_group = ([('branch:%s' % k, k) for k in repo.branches],
                      _("Branches"))
    hist_l.append(branches_group)
    choices.extend([x[0] for x in branches_group[0]])

    if repo.alias == 'hg':
        bookmarks_group = ([('book:%s' % k, k) for k in repo.bookmarks],
                           _("Bookmarks"))
        hist_l.append(bookmarks_group)
        choices.extend([x[0] for x in bookmarks_group[0]])

    tags_group = ([('tag:%s' % k, k) for k in repo.tags],
                  _("Tags"))
    hist_l.append(tags_group)
    choices.extend([x[0] for x in tags_group[0]])

    return choices, hist_l
678 |
|
677 | |||
def _get_git_hook_interpreter(self):
    """Return a suitable interpreter for Git hooks.

    Return a suitable string to be written in the POSIX #! shebang line for
    Git hook scripts so they invoke Kallithea code with the right Python
    interpreter and in the right environment.
    """
    # Note: sys.executable might not point at a usable Python interpreter. For
    # example, when using uwsgi, it will point at the uwsgi program itself.
    # FIXME This may not work on Windows and may need a shell wrapper script.
    configured = kallithea.CONFIG.get('git_hook_interpreter')
    return configured or sys.executable or '/usr/bin/env python3'
692 |
|
691 | |||
def install_git_hooks(self, repo, force=False):
    """
    Creates a kallithea hook inside a git repository

    :param repo: Instance of VCS repo
    :param force: Overwrite existing non-Kallithea hooks
    """
    # bare repos keep hooks at the top level, non-bare under .git/
    if repo.bare:
        hooks_path = os.path.join(repo.path, 'hooks')
    else:
        hooks_path = os.path.join(repo.path, '.git', 'hooks')
    if not os.path.isdir(hooks_path):
        os.makedirs(hooks_path)

    # build each hook script: shebang line + packaged template body
    shebang = b"#!%s\n" % safe_bytes(self._get_git_hook_interpreter())
    hook_templates = []
    for h_type, resource in (('pre', 'pre_receive_tmpl.py'),
                             ('post', 'post_receive_tmpl.py')):
        body = pkg_resources.resource_string(
            'kallithea', os.path.join('config', resource)
        )
        hook_templates.append((h_type, shebang + body))

    for h_type, tmpl in hook_templates:
        hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
        other_hook = False
        log.debug('Installing git hook in repo %s', repo)
        if os.path.exists(hook_file):
            # let's take a look at this hook, maybe it's kallithea ?
            log.debug('hook exists, checking if it is from kallithea')
            with open(hook_file, 'rb') as f:
                data = f.read()
                matches = re.search(br'^KALLITHEA_HOOK_VER\s*=\s*(.*)$', data, flags=re.MULTILINE)
                if matches:
                    ver = matches.groups()[0]
                    log.debug('Found Kallithea hook - it has KALLITHEA_HOOK_VER %r', ver)
                else:
                    log.debug('Found non-Kallithea hook at %s', hook_file)
                    other_hook = True

        if other_hook and not force:
            log.warning('skipping overwriting hook file %s', hook_file)
        else:
            log.debug('writing %s hook file !', h_type)
            try:
                with open(hook_file, 'wb') as f:
                    tmpl = tmpl.replace(b'_TMPL_', safe_bytes(kallithea.__version__))
                    f.write(tmpl)
                os.chmod(hook_file, 0o755)
            except IOError as e:
                log.error('error writing hook %s: %s', hook_file, e)
744 |
|
743 | |||
745 |
|
744 | |||
def AvailableRepoGroupChoices(repo_group_perm_level, extras=()):
    """Return group_id,string tuples with choices for all the repo groups where
    the user has the necessary permissions.

    Top level is -1.
    """
    groups = RepoGroup.query().all()
    if HasPermissionAny('hg.admin')('available repo groups'):
        # admins may place repos at top level and in any group
        groups.append(None)
    else:
        groups = list(RepoGroupList(groups, perm_level=repo_group_perm_level))
        if HasPermissionAny('hg.create.repository')('available repo groups'):
            groups.append(None)
    # make sure every requested extra is present exactly once
    for candidate in extras:
        if all(rg != candidate for rg in groups):
            groups.append(candidate)
    return RepoGroup.groups_choices(groups=groups)
General Comments 0
You need to be logged in to leave comments.
Login now