##// END OF EJS Templates
release: Merge default into stable for release preparation
milka -
r4671:d6153155 merge stable
parent child Browse files
Show More
@@ -0,0 +1,76 b''
1 |RCE| 4.25.0 |RNS|
2 ------------------
3
4 Release Date
5 ^^^^^^^^^^^^
6
7 - 2021-04-02
8
9
10 New Features
11 ^^^^^^^^^^^^
12
13 - SSH: allow clone by ID via SSH operations.
14 - Artifacts: added an admin panel to manage artifacts.
15 - Redmine: added option to add note to a ticket without changing its status in Redmine integration.
16
17
18 General
19 ^^^^^^^
20
21 - Git: change lookups logic. Prioritize reference names over numerical ids.
22 Numerical ids are supported as a fallback if ref matching is unsuccessful.
23 - Permissions: changed fork permission help text to reflect the actual state on how it works.
24 - Permissions: flush permissions on owner changes for repo and repo groups. This
25 would fix problems when owner of repository changes then the new owner lacked permissions
26 until cache expired.
27 - Artifacts: added API function to remove artifacts.
28 - Archives: use a special name for non-hashed archives to fix caching issues.
29 - Packaging: fixed few packages requirements for a proper builds.
30 - Packaging: fix rhodecode-tools for docker builds.
31 - Packaging: fixed some problem after latest setuptools-scm release.
32 - Packaging: added setuptools-scm to packages for build.
33 - Packaging: fix jira package for reproducible builds.
34 - Packaging: fix zipp package patches.
35
36
37 Security
38 ^^^^^^^^
39
40 - Comments: forbid removal of comments by anyone except the owners.
41 Previously admins of a repository could remove them if they would construct a special url with data.
42 - Pull requests: fixed some xss problems when a deleted file with special characters were commented on.
43
44
45 Performance
46 ^^^^^^^^^^^
47
48 - License: skip channelstream connect on license checks logic to reduce calls handling times.
49 - Core: optimize some calls to skip license/scm detection on them. Each license check is expensive
50 and we don't need them on each call.
51
52
53 Fixes
54 ^^^^^
55
56 - Branch-permissions: fixed ce view. Fixes #5656
57 - Feed: fix errors on feed access of empty repositories.
58 - Archives: if implicit ref name was used (e.g master) to obtain archive, we now
59 redirect to explicit commit sha so we can have the proper caching for references names.
60 - rcextensions: fixed pre-files extractor return code support.
61 - Svn: fix subprocess problems on some of the calls for file checking.
62 - Pull requests: fixed multiple repetitions of referenced tickets in pull requests summary sidebar.
63 - Maintenance: fixed bad routes def
64 - clone-uri: fixed the problems with key mismatch that caused errors on summary page.
65 - Largefiles: added fix for downloading largefiles which had no extension in file name.
66 - Compare: fix referenced commits bug.
67 - Git: fix for unicode branches
68
69
70 Upgrade notes
71 ^^^^^^^^^^^^^
72
73 - Scheduled release 4.25.0.
74
75
76
@@ -0,0 +1,13 b''
1 diff -rup channelstream-0.6.14-orig/setup.py channelstream-0.6.14/setup.py
2
3 --- channelstream-0.6.14/setup-orig.py 2021-03-11 12:34:45.000000000 +0100
4 +++ channelstream-0.6.14/setup.py 2021-03-11 12:34:56.000000000 +0100
5 @@ -52,7 +52,7 @@ setup(
6 include_package_data=True,
7 install_requires=requires,
8 python_requires=">=2.7",
9 - setup_requires=["pytest-runner"],
10 + setup_requires=["pytest-runner==5.1.0"],
11 extras_require={
12 "dev": ["coverage", "pytest", "pyramid", "tox", "mock", "webtest"],
13 "lint": ["black"],
@@ -0,0 +1,10 b''
1 diff -rup configparser-4.0.2-orig/pyproject.toml configparser-4.0.2/pyproject.toml
2 --- configparser-4.0.2-orig/pyproject.toml 2021-03-22 21:28:11.000000000 +0100
3 +++ configparser-4.0.2/pyproject.toml 2021-03-22 21:28:11.000000000 +0100
4 @@ -1,5 +1,5 @@
5 [build-system]
6 -requires = ["setuptools>=40.7", "wheel", "setuptools_scm>=1.15"]
7 +requires = ["setuptools<=42.0", "wheel", "setuptools_scm<6.0.0"]
8 build-backend = "setuptools.build_meta"
9
10 [tool.black]
@@ -0,0 +1,7 b''
1 diff -rup importlib-metadata-1.6.0-orig/yproject.toml importlib-metadata-1.6.0/pyproject.toml
2 --- importlib-metadata-1.6.0-orig/yproject.toml 2021-03-22 22:10:33.000000000 +0100
3 +++ importlib-metadata-1.6.0/pyproject.toml 2021-03-22 22:11:09.000000000 +0100
4 @@ -1,3 +1,3 @@
5 [build-system]
6 -requires = ["setuptools>=30.3", "wheel", "setuptools_scm"]
7 +requires = ["setuptools<42.0", "wheel", "setuptools_scm<6.0.0"]
@@ -0,0 +1,12 b''
1 diff -rup pyramid-apispec-0.3.2-orig/setup.py pyramid-apispec-0.3.2/setup.py
2 --- pyramid-apispec-0.3.2-orig/setup.py 2021-03-11 11:19:26.000000000 +0100
3 +++ pyramid-apispec-0.3.2/setup.py 2021-03-11 11:19:51.000000000 +0100
4 @@ -44,7 +44,7 @@ setup(
5 packages=find_packages(exclude=["contrib", "docs", "tests"]),
6 package_data={"pyramid_apispec": ["static/*.*"], "": ["LICENSE"]},
7 install_requires=["apispec[yaml]==1.0.0"],
8 - setup_requires=["pytest-runner"],
9 + setup_requires=["pytest-runner==5.1"],
10 extras_require={
11 "dev": ["coverage", "pytest", "pyramid", "tox", "webtest"],
12 "demo": ["marshmallow==2.15.3", "pyramid", "apispec", "webtest"], No newline at end of file
@@ -0,0 +1,12 b''
1 diff -rup rhodecode-tools-1.4.0-orig/setup.py rhodecode-tools-1.4.0/setup.py
2 --- rhodecode-tools-1.4.0/setup-orig.py 2021-03-11 12:34:45.000000000 +0100
3 +++ rhodecode-tools-1.4.0/setup.py 2021-03-11 12:34:56.000000000 +0100
4 @@ -69,7 +69,7 @@ def _get_requirements(req_filename, excl
5
6
7 # requirements extract
8 -setup_requirements = ['pytest-runner']
9 +setup_requirements = ['pytest-runner==5.1.0']
10 install_requirements = _get_requirements(
11 'requirements.txt', exclude=['setuptools'])
12 test_requirements = _get_requirements('requirements_test.txt') No newline at end of file
@@ -0,0 +1,10 b''
1 diff -rup zip-1.2.0-orig/pyproject.toml zip-1.2.0/pyproject.toml
2 --- zip-1.2.0-orig/pyproject.toml 2021-03-23 10:55:37.000000000 +0100
3 +++ zip-1.2.0/pyproject.toml 2021-03-23 10:56:05.000000000 +0100
4 @@ -1,5 +1,5 @@
5 [build-system]
6 -requires = ["setuptools>=34.4", "wheel", "setuptools_scm>=1.15"]
7 +requires = ["setuptools<42.0", "wheel", "setuptools_scm<6.0.0"]
8 build-backend = "setuptools.build_meta"
9
10 [tool.black]
@@ -0,0 +1,74 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import mock
22 import pytest
23
24 from rhodecode.lib.utils2 import str2bool
25 from rhodecode.lib.vcs.exceptions import RepositoryRequirementError
26 from rhodecode.model.db import Repository, UserRepoToPerm, Permission, User
27 from rhodecode.model.meta import Session
28 from rhodecode.tests import (
29 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, assert_session_flash)
30 from rhodecode.tests.fixture import Fixture
31
32 fixture = Fixture()
33
34
35 def route_path(name, params=None, **kwargs):
36 import urllib
37
38 base_url = {
39 'edit_repo_maintenance': '/{repo_name}/settings/maintenance',
40 'edit_repo_maintenance_execute': '/{repo_name}/settings/maintenance/execute',
41
42 }[name].format(**kwargs)
43
44 if params:
45 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
46 return base_url
47
48
49 def _get_permission_for_user(user, repo):
50 perm = UserRepoToPerm.query()\
51 .filter(UserRepoToPerm.repository ==
52 Repository.get_by_repo_name(repo))\
53 .filter(UserRepoToPerm.user == User.get_by_username(user))\
54 .all()
55 return perm
56
57
58 @pytest.mark.usefixtures('autologin_user', 'app')
59 class TestAdminRepoMaintenance(object):
60 @pytest.mark.parametrize('urlname', [
61 'edit_repo_maintenance',
62 ])
63 def test_show_page(self, urlname, app, backend):
64 app.get(route_path(urlname, repo_name=backend.repo_name), status=200)
65
66 def test_execute_maintenance_for_repo_hg(self, app, backend_hg, autologin_user, xhr_header):
67 repo_name = backend_hg.repo_name
68
69 response = app.get(
70 route_path('edit_repo_maintenance_execute',
71 repo_name=repo_name,),
72 extra_environ=xhr_header)
73
74 assert "HG Verify repo" in ''.join(response.json)
@@ -1,6 +1,5 b''
1 1 [bumpversion]
2 current_version = 4.24.1
2 current_version = 4.25.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:rhodecode/VERSION]
6
@@ -1,33 +1,28 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:rc_tools_pinned]
8 done = true
9 8
10 9 [task:fixes_on_stable]
11 done = true
12 10
13 11 [task:pip2nix_generated]
14 done = true
15 12
16 13 [task:changelog_updated]
17 done = true
18 14
19 15 [task:generate_api_docs]
20 done = true
16
17 [task:updated_translation]
21 18
22 19 [release]
23 state = prepared
24 version = 4.24.1
25
26 [task:updated_translation]
20 state = in_progress
21 version = 4.25.0
27 22
28 23 [task:generate_js_routes]
29 24
30 25 [task:updated_trial_license]
31 26
32 27 [task:generate_oss_licenses]
33 28
@@ -1,184 +1,202 b''
1 1 .. _store-methods-ref:
2 2
3 3 store methods
4 4 =============
5 5
6 6 file_store_add (EE only)
7 7 ------------------------
8 8
9 9 .. py:function:: file_store_add(apiuser, filename, content, description=<Optional:''>)
10 10
11 11 Upload API for the file_store
12 12
13 13 Example usage from CLI::
14 14 rhodecode-api --instance-name=enterprise-1 upload_file "{"content": "$(cat image.jpg | base64)", "filename":"image.jpg"}"
15 15
16 16 This command takes the following options:
17 17
18 18 :param apiuser: This is filled automatically from the |authtoken|.
19 19 :type apiuser: AuthUser
20 20 :param filename: name of the file uploaded
21 21 :type filename: str
22 22 :param description: Optional description for added file
23 23 :type description: str
24 24 :param content: base64 encoded content of the uploaded file
25 25 :type content: str
26 26
27 27 Example output:
28 28
29 29 .. code-block:: bash
30 30
31 31 id : <id_given_in_input>
32 32 result: {
33 33 "access_path": "/_file_store/download/84d156f7-8323-4ad3-9fce-4a8e88e1deaf-0.jpg",
34 34 "access_path_fqn": "http://server.domain.com/_file_store/download/84d156f7-8323-4ad3-9fce-4a8e88e1deaf-0.jpg",
35 35 "store_fid": "84d156f7-8323-4ad3-9fce-4a8e88e1deaf-0.jpg"
36 36 }
37 37 error : null
38 38
39 39
40 40 file_store_add_with_acl (EE only)
41 41 ---------------------------------
42 42
43 43 .. py:function:: file_store_add_with_acl(apiuser, filename, content, description=<Optional:''>, scope_user_id=<Optional:None>, scope_repo_id=<Optional:None>, scope_repo_group_id=<Optional:None>)
44 44
45 45 Upload API for the file_store
46 46
47 47 Example usage from CLI::
48 48 rhodecode-api --instance-name=enterprise-1 upload_file "{"content": "$(cat image.jpg | base64)", "filename":"image.jpg", "scope_repo_id":101}"
49 49
50 50 This command takes the following options:
51 51
52 52 :param apiuser: This is filled automatically from the |authtoken|.
53 53 :type apiuser: AuthUser
54 54 :param filename: name of the file uploaded
55 55 :type filename: str
56 56 :param description: Optional description for added file
57 57 :type description: str
58 58 :param content: base64 encoded content of the uploaded file
59 59 :type content: str
60 60
61 61 :param scope_user_id: Optionally bind this file to user.
62 62 This will check ACL in such way only this user can access the file.
63 63 :type scope_user_id: int
64 64 :param scope_repo_id: Optionally bind this file to repository.
65 65 This will check ACL in such way only user with proper access to such
66 66 repository can access the file.
67 67 :type scope_repo_id: int
68 68 :param scope_repo_group_id: Optionally bind this file to repository group.
69 69 This will check ACL in such way only user with proper access to such
70 70 repository group can access the file.
71 71 :type scope_repo_group_id: int
72 72
73 73 Example output:
74 74
75 75 .. code-block:: bash
76 76
77 77 id : <id_given_in_input>
78 78 result: {
79 79 "access_path": "/_file_store/download/84d156f7-8323-4ad3-9fce-4a8e88e1deaf-0.jpg",
80 80 "access_path_fqn": "http://server.domain.com/_file_store/download/84d156f7-8323-4ad3-9fce-4a8e88e1deaf-0.jpg",
81 81 "store_fid": "84d156f7-8323-4ad3-9fce-4a8e88e1deaf-0.jpg"
82 82 }
83 83 error : null
84 84
85 85
86 86 file_store_get_info (EE only)
87 87 -----------------------------
88 88
89 89 .. py:function:: file_store_get_info(apiuser, store_fid)
90 90
91 91 Get artifact data.
92 92
93 93 Example output:
94 94
95 95 .. code-block:: bash
96 96
97 97 id : <id_given_in_input>
98 98 result: {
99 99 "artifact": {
100 100 "access_path_fqn": "https://rhodecode.example.com/_file_store/download/0-031c2aa0-0d56-49a7-9ba3-b570bdd342ab.jpg",
101 101 "created_on": "2019-10-15T16:25:35.491",
102 102 "description": "my upload",
103 103 "downloaded_times": 1,
104 104 "file_uid": "0-031c2aa0-0d56-49a7-9ba3-b570bdd342ab.jpg",
105 105 "filename": "example.jpg",
106 106 "filename_org": "0-031c2aa0-0d56-49a7-9ba3-b570bdd342ab.jpg",
107 107 "hidden": false,
108 108 "metadata": [
109 109 {
110 110 "artifact": "0-031c2aa0-0d56-49a7-9ba3-b570bdd342ab.jpg",
111 111 "key": "yellow",
112 112 "section": "tags",
113 113 "value": "bar"
114 114 }
115 115 ],
116 116 "sha256": "818dff0f44574dfb6814d38e6bf3c60c5943d1d13653398ecddaedf2f6a5b04d",
117 117 "size": 18599,
118 118 "uploaded_by": {
119 119 "email": "admin@rhodecode.com",
120 120 "emails": [
121 121 "admin@rhodecode.com"
122 122 ],
123 123 "firstname": "Admin",
124 124 "lastname": "LastName",
125 125 "user_id": 2,
126 126 "username": "admin"
127 127 }
128 128 }
129 129 }
130 130 error : null
131 131
132 132
133 file_store_delete (EE only)
134 ---------------------------
135
136 .. py:function:: file_store_delete(apiuser, store_fid)
137
138 Delete an artifact based on the secret uuid.
139
140 Example output:
141
142 .. code-block:: bash
143
144 id : <id_given_in_input>
145 result: {
146 "artifact" : {"uid": "some uid", "removed": true}
147 }
148 error : null
149
150
133 151 file_store_add_metadata (EE only)
134 152 ---------------------------------
135 153
136 154 .. py:function:: file_store_add_metadata(apiuser, store_fid, section, key, value, value_type=<Optional:'unicode'>)
137 155
138 156 Add metadata into artifact. The metadata consist of section, key, value. eg.
139 157 section='tags', 'key'='tag_name', value='1'
140 158
141 159 :param apiuser: This is filled automatically from the |authtoken|.
142 160 :type apiuser: AuthUser
143 161
144 162 :param store_fid: file uid, e.g 0-d054cb71-91ab-44e2-9e4b-23fe14b4d74a.mp4
145 163 :type store_fid: str
146 164
147 165 :param section: Section name to add metadata
148 166 :type section: str
149 167
150 168 :param key: Key to add as metadata
151 169 :type key: str
152 170
153 171 :param value: Value to add as metadata
154 172 :type value: str
155 173
156 174 :param value_type: Optional type, default is 'unicode' other types are:
157 175 int, list, bool, unicode, str
158 176
159 177 :type value_type: str
160 178
161 179 Example output:
162 180
163 181 .. code-block:: bash
164 182
165 183 id : <id_given_in_input>
166 184 result: {
167 185 "metadata": [
168 186 {
169 187 "artifact": "0-d054cb71-91ab-44e2-9e4b-23fe14b4d74a.mp4",
170 188 "key": "secret",
171 189 "section": "tags",
172 190 "value": "1"
173 191 },
174 192 {
175 193 "artifact": "0-d054cb71-91ab-44e2-9e4b-23fe14b4d74a.mp4",
176 194 "key": "video",
177 195 "section": "tags",
178 196 "value": "1"
179 197 }
180 198 ]
181 199 }
182 200 error : null
183 201
184 202
@@ -1,153 +1,154 b''
1 1 .. _rhodecode-release-notes-ref:
2 2
3 3 Release Notes
4 4 =============
5 5
6 6 |RCE| 4.x Versions
7 7 ------------------
8 8
9 9 .. toctree::
10 10 :maxdepth: 1
11 11
12 release-notes-4.25.0.rst
12 13 release-notes-4.24.1.rst
13 14 release-notes-4.24.0.rst
14 15 release-notes-4.23.2.rst
15 16 release-notes-4.23.1.rst
16 17 release-notes-4.23.0.rst
17 18 release-notes-4.22.0.rst
18 19 release-notes-4.21.0.rst
19 20 release-notes-4.20.1.rst
20 21 release-notes-4.20.0.rst
21 22 release-notes-4.19.3.rst
22 23 release-notes-4.19.2.rst
23 24 release-notes-4.19.1.rst
24 25 release-notes-4.19.0.rst
25 26 release-notes-4.18.3.rst
26 27 release-notes-4.18.2.rst
27 28 release-notes-4.18.1.rst
28 29 release-notes-4.18.0.rst
29 30 release-notes-4.17.4.rst
30 31 release-notes-4.17.3.rst
31 32 release-notes-4.17.2.rst
32 33 release-notes-4.17.1.rst
33 34 release-notes-4.17.0.rst
34 35 release-notes-4.16.2.rst
35 36 release-notes-4.16.1.rst
36 37 release-notes-4.16.0.rst
37 38 release-notes-4.15.2.rst
38 39 release-notes-4.15.1.rst
39 40 release-notes-4.15.0.rst
40 41 release-notes-4.14.1.rst
41 42 release-notes-4.14.0.rst
42 43 release-notes-4.13.3.rst
43 44 release-notes-4.13.2.rst
44 45 release-notes-4.13.1.rst
45 46 release-notes-4.13.0.rst
46 47 release-notes-4.12.4.rst
47 48 release-notes-4.12.3.rst
48 49 release-notes-4.12.2.rst
49 50 release-notes-4.12.1.rst
50 51 release-notes-4.12.0.rst
51 52 release-notes-4.11.6.rst
52 53 release-notes-4.11.5.rst
53 54 release-notes-4.11.4.rst
54 55 release-notes-4.11.3.rst
55 56 release-notes-4.11.2.rst
56 57 release-notes-4.11.1.rst
57 58 release-notes-4.11.0.rst
58 59 release-notes-4.10.6.rst
59 60 release-notes-4.10.5.rst
60 61 release-notes-4.10.4.rst
61 62 release-notes-4.10.3.rst
62 63 release-notes-4.10.2.rst
63 64 release-notes-4.10.1.rst
64 65 release-notes-4.10.0.rst
65 66 release-notes-4.9.1.rst
66 67 release-notes-4.9.0.rst
67 68 release-notes-4.8.0.rst
68 69 release-notes-4.7.2.rst
69 70 release-notes-4.7.1.rst
70 71 release-notes-4.7.0.rst
71 72 release-notes-4.6.1.rst
72 73 release-notes-4.6.0.rst
73 74 release-notes-4.5.2.rst
74 75 release-notes-4.5.1.rst
75 76 release-notes-4.5.0.rst
76 77 release-notes-4.4.2.rst
77 78 release-notes-4.4.1.rst
78 79 release-notes-4.4.0.rst
79 80 release-notes-4.3.1.rst
80 81 release-notes-4.3.0.rst
81 82 release-notes-4.2.1.rst
82 83 release-notes-4.2.0.rst
83 84 release-notes-4.1.2.rst
84 85 release-notes-4.1.1.rst
85 86 release-notes-4.1.0.rst
86 87 release-notes-4.0.1.rst
87 88 release-notes-4.0.0.rst
88 89
89 90 |RCE| 3.x Versions
90 91 ------------------
91 92
92 93 .. toctree::
93 94 :maxdepth: 1
94 95
95 96 release-notes-3.8.4.rst
96 97 release-notes-3.8.3.rst
97 98 release-notes-3.8.2.rst
98 99 release-notes-3.8.1.rst
99 100 release-notes-3.8.0.rst
100 101 release-notes-3.7.1.rst
101 102 release-notes-3.7.0.rst
102 103 release-notes-3.6.1.rst
103 104 release-notes-3.6.0.rst
104 105 release-notes-3.5.2.rst
105 106 release-notes-3.5.1.rst
106 107 release-notes-3.5.0.rst
107 108 release-notes-3.4.1.rst
108 109 release-notes-3.4.0.rst
109 110 release-notes-3.3.4.rst
110 111 release-notes-3.3.3.rst
111 112 release-notes-3.3.2.rst
112 113 release-notes-3.3.1.rst
113 114 release-notes-3.3.0.rst
114 115 release-notes-3.2.3.rst
115 116 release-notes-3.2.2.rst
116 117 release-notes-3.2.1.rst
117 118 release-notes-3.2.0.rst
118 119 release-notes-3.1.1.rst
119 120 release-notes-3.1.0.rst
120 121 release-notes-3.0.2.rst
121 122 release-notes-3.0.1.rst
122 123 release-notes-3.0.0.rst
123 124
124 125 |RCE| 2.x Versions
125 126 ------------------
126 127
127 128 .. toctree::
128 129 :maxdepth: 1
129 130
130 131 release-notes-2.2.8.rst
131 132 release-notes-2.2.7.rst
132 133 release-notes-2.2.6.rst
133 134 release-notes-2.2.5.rst
134 135 release-notes-2.2.4.rst
135 136 release-notes-2.2.3.rst
136 137 release-notes-2.2.2.rst
137 138 release-notes-2.2.1.rst
138 139 release-notes-2.2.0.rst
139 140 release-notes-2.1.0.rst
140 141 release-notes-2.0.2.rst
141 142 release-notes-2.0.1.rst
142 143 release-notes-2.0.0.rst
143 144
144 145 |RCE| 1.x Versions
145 146 ------------------
146 147
147 148 .. toctree::
148 149 :maxdepth: 1
149 150
150 151 release-notes-1.7.2.rst
151 152 release-notes-1.7.1.rst
152 153 release-notes-1.7.0.rst
153 154 release-notes-1.6.0.rst
@@ -1,12 +1,12 b''
1 1 diff -rup pytest-4.6.5-orig/setup.py pytest-4.6.5/setup.py
2 2 --- pytest-4.6.5-orig/setup.py 2018-04-10 10:23:04.000000000 +0200
3 3 +++ pytest-4.6.5/setup.py 2018-04-10 10:23:34.000000000 +0200
4 4 @@ -24,7 +24,7 @@ INSTALL_REQUIRES = [
5 5 def main():
6 6 setup(
7 7 use_scm_version={"write_to": "src/_pytest/_version.py"},
8 8 - setup_requires=["setuptools-scm", "setuptools>=40.0"],
9 + setup_requires=["setuptools-scm", "setuptools<=42.0"],
9 + setup_requires=["setuptools-scm<6.0.0", "setuptools<=42.0"],
10 10 package_dir={"": "src"},
11 11 # fmt: off
12 12 extras_require={ No newline at end of file
@@ -1,287 +1,353 b''
1 1 # Overrides for the generated python-packages.nix
2 2 #
3 3 # This function is intended to be used as an extension to the generated file
4 4 # python-packages.nix. The main objective is to add needed dependencies of C
5 5 # libraries and tweak the build instructions where needed.
6 6
7 7 { pkgs
8 8 , basePythonPackages
9 9 }:
10 10
11 11 let
12 12 sed = "sed -i";
13 13
14 14 localLicenses = {
15 15 repoze = {
16 16 fullName = "Repoze License";
17 17 url = http://www.repoze.org/LICENSE.txt;
18 18 };
19 19 };
20 20
21 21 in
22 22
23 23 self: super: {
24 24
25 25 "appenlight-client" = super."appenlight-client".override (attrs: {
26 26 meta = {
27 27 license = [ pkgs.lib.licenses.bsdOriginal ];
28 28 };
29 29 });
30 30
31 31 "beaker" = super."beaker".override (attrs: {
32 32 patches = [
33 33 ./patches/beaker/patch-beaker-lock-func-debug.diff
34 34 ./patches/beaker/patch-beaker-metadata-reuse.diff
35 35 ./patches/beaker/patch-beaker-improved-redis.diff
36 36 ./patches/beaker/patch-beaker-improved-redis-2.diff
37 37 ];
38 38 });
39 39
40 40 "cffi" = super."cffi".override (attrs: {
41 41 buildInputs = [
42 42 pkgs.libffi
43 43 ];
44 44 });
45 45
46 46 "cryptography" = super."cryptography".override (attrs: {
47 47 buildInputs = [
48 48 pkgs.openssl
49 49 ];
50 50 });
51 51
52 52 "gevent" = super."gevent".override (attrs: {
53 53 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
54 54 # NOTE: (marcink) odd requirements from gevent aren't set properly,
55 55 # thus we need to inject psutil manually
56 56 self."psutil"
57 57 ];
58 58 });
59 59
60 60 "future" = super."future".override (attrs: {
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.mit ];
63 63 };
64 64 });
65 65
66 66 "testpath" = super."testpath".override (attrs: {
67 67 meta = {
68 68 license = [ pkgs.lib.licenses.mit ];
69 69 };
70 70 });
71 71
72 72 "gnureadline" = super."gnureadline".override (attrs: {
73 73 buildInputs = [
74 74 pkgs.ncurses
75 75 ];
76 76 patchPhase = ''
77 77 substituteInPlace setup.py --replace "/bin/bash" "${pkgs.bash}/bin/bash"
78 78 '';
79 79 });
80 80
81 81 "gunicorn" = super."gunicorn".override (attrs: {
82 82 propagatedBuildInputs = [
83 83 # johbo: futures is needed as long as we are on Python 2, otherwise
84 84 # gunicorn explodes if used with multiple threads per worker.
85 85 self."futures"
86 86 ];
87 87 });
88 88
89 89 "nbconvert" = super."nbconvert".override (attrs: {
90 90 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
91 91 # marcink: plug in jupyter-client for notebook rendering
92 92 self."jupyter-client"
93 93 ];
94 94 });
95 95
96 96 "ipython" = super."ipython".override (attrs: {
97 97 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
98 98 self."gnureadline"
99 99 ];
100 100 });
101 101
102 102 "lxml" = super."lxml".override (attrs: {
103 103 buildInputs = [
104 104 pkgs.libxml2
105 105 pkgs.libxslt
106 106 ];
107 107 propagatedBuildInputs = [
108 108 # Needed, so that "setup.py bdist_wheel" does work
109 109 self."wheel"
110 110 ];
111 111 });
112 112
113 113 "mysql-python" = super."mysql-python".override (attrs: {
114 114 buildInputs = [
115 115 pkgs.openssl
116 116 ];
117 117 propagatedBuildInputs = [
118 118 pkgs.libmysql
119 119 pkgs.zlib
120 120 ];
121 121 });
122 122
123 123 "psycopg2" = super."psycopg2".override (attrs: {
124 124 propagatedBuildInputs = [
125 125 pkgs.postgresql
126 126 ];
127 127 meta = {
128 128 license = pkgs.lib.licenses.lgpl3Plus;
129 129 };
130 130 });
131 131
132 132 "pycurl" = super."pycurl".override (attrs: {
133 133 propagatedBuildInputs = [
134 134 pkgs.curl
135 135 pkgs.openssl
136 136 ];
137 137
138 138 preConfigure = ''
139 139 substituteInPlace setup.py --replace '--static-libs' '--libs'
140 140 export PYCURL_SSL_LIBRARY=openssl
141 141 '';
142 142
143 143 meta = {
144 144 license = pkgs.lib.licenses.mit;
145 145 };
146 146 });
147 147
148 148 "pyramid" = super."pyramid".override (attrs: {
149 149 meta = {
150 150 license = localLicenses.repoze;
151 151 };
152 152 });
153 153
154 154 "pyramid-debugtoolbar" = super."pyramid-debugtoolbar".override (attrs: {
155 155 meta = {
156 156 license = [ pkgs.lib.licenses.bsdOriginal localLicenses.repoze ];
157 157 };
158 158 });
159 159
160 160 "pysqlite" = super."pysqlite".override (attrs: {
161 161 propagatedBuildInputs = [
162 162 pkgs.sqlite
163 163 ];
164 164 meta = {
165 165 license = [ pkgs.lib.licenses.zlib pkgs.lib.licenses.libpng ];
166 166 };
167 167 });
168 168
169 169 "python-ldap" = super."python-ldap".override (attrs: {
170 170 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
171 171 pkgs.openldap
172 172 pkgs.cyrus_sasl
173 173 pkgs.openssl
174 174 ];
175 175 });
176 176
177 177 "python-pam" = super."python-pam".override (attrs: {
178 178 propagatedBuildInputs = [
179 179 pkgs.pam
180 180 ];
181 181
182 182 # TODO: johbo: Check if this can be avoided, or transform into
183 183 # a real patch
184 184 patchPhase = ''
185 185 substituteInPlace pam.py \
186 186 --replace 'find_library("pam")' '"${pkgs.pam}/lib/libpam.so.0"'
187 187 '';
188 188
189 189 });
190 190
191 191 "python-saml" = super."python-saml".override (attrs: {
192 192 buildInputs = [
193 193 pkgs.libxml2
194 194 pkgs.libxslt
195 195 ];
196 196 });
197 197
198 198 "dm.xmlsec.binding" = super."dm.xmlsec.binding".override (attrs: {
199 199 buildInputs = [
200 200 pkgs.libxml2
201 201 pkgs.libxslt
202 202 pkgs.xmlsec
203 203 pkgs.libtool
204 204 ];
205 205 });
206 206
207 207 "pyzmq" = super."pyzmq".override (attrs: {
208 208 buildInputs = [
209 209 pkgs.czmq
210 210 ];
211 211 });
212 212
213 213 "urlobject" = super."urlobject".override (attrs: {
214 214 meta = {
215 215 license = {
216 216 spdxId = "Unlicense";
217 217 fullName = "The Unlicense";
218 218 url = http://unlicense.org/;
219 219 };
220 220 };
221 221 });
222 222
223 223 "docutils" = super."docutils".override (attrs: {
224 224 meta = {
225 225 license = pkgs.lib.licenses.bsd2;
226 226 };
227 227 });
228 228
229 229 "colander" = super."colander".override (attrs: {
230 230 meta = {
231 231 license = localLicenses.repoze;
232 232 };
233 233 });
234 234
235 235 "pyramid-beaker" = super."pyramid-beaker".override (attrs: {
236 236 meta = {
237 237 license = localLicenses.repoze;
238 238 };
239 239 });
240 240
241 241 "pyramid-mako" = super."pyramid-mako".override (attrs: {
242 242 meta = {
243 243 license = localLicenses.repoze;
244 244 };
245 245 });
246 246
247 247 "repoze.lru" = super."repoze.lru".override (attrs: {
248 248 meta = {
249 249 license = localLicenses.repoze;
250 250 };
251 251 });
252 252
253 253 "python-editor" = super."python-editor".override (attrs: {
254 254 meta = {
255 255 license = pkgs.lib.licenses.asl20;
256 256 };
257 257 });
258 258
259 259 "translationstring" = super."translationstring".override (attrs: {
260 260 meta = {
261 261 license = localLicenses.repoze;
262 262 };
263 263 });
264 264
265 265 "venusian" = super."venusian".override (attrs: {
266 266 meta = {
267 267 license = localLicenses.repoze;
268 268 };
269 269 });
270 270
271 271 "supervisor" = super."supervisor".override (attrs: {
272 272 patches = [
273 273 ./patches/supervisor/patch-rlimits-old-kernel.diff
274 274 ];
275 275 });
276 276
277 277 "pytest" = super."pytest".override (attrs: {
278 278 patches = [
279 279 ./patches/pytest/setuptools.patch
280 280 ];
281 281 });
282 282
283 "pytest-runner" = super."pytest-runner".override (attrs: {
284 propagatedBuildInputs = [
285 self."setuptools-scm"
286 ];
287 });
288
289 "py" = super."py".override (attrs: {
290 propagatedBuildInputs = [
291 self."setuptools-scm"
292 ];
293 });
294
295 "python-dateutil" = super."python-dateutil".override (attrs: {
296 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
297 self."setuptools-scm"
298 ];
299 });
300
301 "configparser" = super."configparser".override (attrs: {
302 patches = [
303 ./patches/configparser/pyproject.patch
304 ];
305 propagatedBuildInputs = [
306 self."setuptools-scm"
307 ];
308 });
309
310 "importlib-metadata" = super."importlib-metadata".override (attrs: {
311
312 patches = [
313 ./patches/importlib_metadata/pyproject.patch
314 ];
315
316 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
317 self."setuptools-scm"
318 ];
319
320 });
321
322 "zipp" = super."zipp".override (attrs: {
323 patches = [
324 ./patches/zipp/pyproject.patch
325 ];
326 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
327 self."setuptools-scm"
328 ];
329 });
330
331 "pyramid-apispec" = super."pyramid-apispec".override (attrs: {
332 patches = [
333 ./patches/pyramid_apispec/setuptools.patch
334 ];
335 });
336
337 "channelstream" = super."channelstream".override (attrs: {
338 patches = [
339 ./patches/channelstream/setuptools.patch
340 ];
341 });
342
343 "rhodecode-tools" = super."rhodecode-tools".override (attrs: {
344 patches = [
345 ./patches/rhodecode_tools/setuptools.patch
346 ];
347 });
348
283 349 # Avoid that base packages screw up the build process
284 350 inherit (basePythonPackages)
285 351 setuptools;
286 352
287 353 }
@@ -1,2509 +1,2520 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "alembic" = super.buildPythonPackage {
8 8 name = "alembic-1.4.2";
9 9 doCheck = false;
10 10 propagatedBuildInputs = [
11 11 self."sqlalchemy"
12 12 self."mako"
13 13 self."python-editor"
14 14 self."python-dateutil"
15 15 ];
16 16 src = fetchurl {
17 17 url = "https://files.pythonhosted.org/packages/60/1e/cabc75a189de0fbb2841d0975243e59bde8b7822bacbb95008ac6fe9ad47/alembic-1.4.2.tar.gz";
18 18 sha256 = "1gsdrzx9h7wfva200qvvsc9sn4w79mk2vs0bbnzjhxi1jw2b0nh3";
19 19 };
20 20 meta = {
21 21 license = [ pkgs.lib.licenses.mit ];
22 22 };
23 23 };
24 24 "amqp" = super.buildPythonPackage {
25 25 name = "amqp-2.5.2";
26 26 doCheck = false;
27 27 propagatedBuildInputs = [
28 28 self."vine"
29 29 ];
30 30 src = fetchurl {
31 31 url = "https://files.pythonhosted.org/packages/92/1d/433541994a5a69f4ad2fff39746ddbb0bdedb0ea0d85673eb0db68a7edd9/amqp-2.5.2.tar.gz";
32 32 sha256 = "13dhhfxjrqcjybnq4zahg92mydhpg2l76nxcmq7d560687wsxwbp";
33 33 };
34 34 meta = {
35 35 license = [ pkgs.lib.licenses.bsdOriginal ];
36 36 };
37 37 };
38 38 "apispec" = super.buildPythonPackage {
39 39 name = "apispec-1.0.0";
40 40 doCheck = false;
41 41 propagatedBuildInputs = [
42 42 self."PyYAML"
43 43 ];
44 44 src = fetchurl {
45 45 url = "https://files.pythonhosted.org/packages/67/15/346c04988dd67d36007e28145504c520491930c878b1f484a97b27a8f497/apispec-1.0.0.tar.gz";
46 46 sha256 = "1712w1anvqrvadjjpvai84vbaygaxabd3zz5lxihdzwzs4gvi9sp";
47 47 };
48 48 meta = {
49 49 license = [ pkgs.lib.licenses.mit ];
50 50 };
51 51 };
52 52 "appenlight-client" = super.buildPythonPackage {
53 53 name = "appenlight-client-0.6.26";
54 54 doCheck = false;
55 55 propagatedBuildInputs = [
56 56 self."webob"
57 57 self."requests"
58 58 self."six"
59 59 ];
60 60 src = fetchurl {
61 61 url = "https://files.pythonhosted.org/packages/2e/56/418fc10379b96e795ee39a15e69a730c222818af04c3821fa354eaa859ec/appenlight_client-0.6.26.tar.gz";
62 62 sha256 = "0s9xw3sb8s3pk73k78nnq4jil3q4mk6bczfa1fmgfx61kdxl2712";
63 63 };
64 64 meta = {
65 65 license = [ pkgs.lib.licenses.bsdOriginal ];
66 66 };
67 67 };
68 68 "asn1crypto" = super.buildPythonPackage {
69 69 name = "asn1crypto-0.24.0";
70 70 doCheck = false;
71 71 src = fetchurl {
72 72 url = "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz";
73 73 sha256 = "0jaf8rf9dx1lf23xfv2cdd5h52f1qr3w8k63985bc35g3d220p4x";
74 74 };
75 75 meta = {
76 76 license = [ pkgs.lib.licenses.mit ];
77 77 };
78 78 };
79 79 "atomicwrites" = super.buildPythonPackage {
80 80 name = "atomicwrites-1.3.0";
81 81 doCheck = false;
82 82 src = fetchurl {
83 83 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
84 84 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
85 85 };
86 86 meta = {
87 87 license = [ pkgs.lib.licenses.mit ];
88 88 };
89 89 };
90 90 "attrs" = super.buildPythonPackage {
91 91 name = "attrs-19.3.0";
92 92 doCheck = false;
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
95 95 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.mit ];
99 99 };
100 100 };
101 101 "babel" = super.buildPythonPackage {
102 102 name = "babel-1.3";
103 103 doCheck = false;
104 104 propagatedBuildInputs = [
105 105 self."pytz"
106 106 ];
107 107 src = fetchurl {
108 108 url = "https://files.pythonhosted.org/packages/33/27/e3978243a03a76398c384c83f7ca879bc6e8f1511233a621fcada135606e/Babel-1.3.tar.gz";
109 109 sha256 = "0bnin777lc53nxd1hp3apq410jj5wx92n08h7h4izpl4f4sx00lz";
110 110 };
111 111 meta = {
112 112 license = [ pkgs.lib.licenses.bsdOriginal ];
113 113 };
114 114 };
115 115 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
116 116 name = "backports.shutil-get-terminal-size-1.0.0";
117 117 doCheck = false;
118 118 src = fetchurl {
119 119 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
120 120 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
121 121 };
122 122 meta = {
123 123 license = [ pkgs.lib.licenses.mit ];
124 124 };
125 125 };
126 126 "beaker" = super.buildPythonPackage {
127 127 name = "beaker-1.9.1";
128 128 doCheck = false;
129 129 propagatedBuildInputs = [
130 130 self."funcsigs"
131 131 ];
132 132 src = fetchurl {
133 133 url = "https://files.pythonhosted.org/packages/ca/14/a626188d0d0c7b55dd7cf1902046c2743bd392a7078bb53073e13280eb1e/Beaker-1.9.1.tar.gz";
134 134 sha256 = "08arsn61r255lhz6hcpn2lsiqpg30clla805ysx06wmbhvb6w9rj";
135 135 };
136 136 meta = {
137 137 license = [ pkgs.lib.licenses.bsdOriginal ];
138 138 };
139 139 };
140 140 "beautifulsoup4" = super.buildPythonPackage {
141 141 name = "beautifulsoup4-4.6.3";
142 142 doCheck = false;
143 143 src = fetchurl {
144 144 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
145 145 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
146 146 };
147 147 meta = {
148 148 license = [ pkgs.lib.licenses.mit ];
149 149 };
150 150 };
151 151 "billiard" = super.buildPythonPackage {
152 152 name = "billiard-3.6.1.0";
153 153 doCheck = false;
154 154 src = fetchurl {
155 155 url = "https://files.pythonhosted.org/packages/68/1d/2aea8fbb0b1e1260a8a2e77352de2983d36d7ac01207cf14c2b9c6cc860e/billiard-3.6.1.0.tar.gz";
156 156 sha256 = "09hzy3aqi7visy4vmf4xiish61n0rq5nd3iwjydydps8yrs9r05q";
157 157 };
158 158 meta = {
159 159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 160 };
161 161 };
162 162 "bleach" = super.buildPythonPackage {
163 163 name = "bleach-3.1.3";
164 164 doCheck = false;
165 165 propagatedBuildInputs = [
166 166 self."six"
167 167 self."webencodings"
168 168 ];
169 169 src = fetchurl {
170 170 url = "https://files.pythonhosted.org/packages/de/09/5267f8577a92487ed43bc694476c4629c6eca2e3c93fcf690a26bfe39e1d/bleach-3.1.3.tar.gz";
171 171 sha256 = "0al437aw4p2xp83az5hhlrp913nsf0cg6kg4qj3fjhv4wakxipzq";
172 172 };
173 173 meta = {
174 174 license = [ pkgs.lib.licenses.asl20 ];
175 175 };
176 176 };
177 177 "bumpversion" = super.buildPythonPackage {
178 178 name = "bumpversion-0.5.3";
179 179 doCheck = false;
180 180 src = fetchurl {
181 181 url = "https://files.pythonhosted.org/packages/14/41/8c9da3549f8e00c84f0432c3a8cf8ed6898374714676aab91501d48760db/bumpversion-0.5.3.tar.gz";
182 182 sha256 = "0zn7694yfipxg35ikkfh7kvgl2fissha3dnqad2c5bvsvmrwhi37";
183 183 };
184 184 meta = {
185 185 license = [ pkgs.lib.licenses.mit ];
186 186 };
187 187 };
188 188 "cachetools" = super.buildPythonPackage {
189 189 name = "cachetools-3.1.1";
190 190 doCheck = false;
191 191 src = fetchurl {
192 192 url = "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz";
193 193 sha256 = "16m69l6n6y1r1y7cklm92rr7v69ldig2n3lbl3j323w5jz7d78lf";
194 194 };
195 195 meta = {
196 196 license = [ pkgs.lib.licenses.mit ];
197 197 };
198 198 };
199 199 "celery" = super.buildPythonPackage {
200 200 name = "celery-4.3.0";
201 201 doCheck = false;
202 202 propagatedBuildInputs = [
203 203 self."pytz"
204 204 self."billiard"
205 205 self."kombu"
206 206 self."vine"
207 207 ];
208 208 src = fetchurl {
209 209 url = "https://files.pythonhosted.org/packages/a2/4b/d020836f751617e907e84753a41c92231cd4b673ff991b8ee9da52361323/celery-4.3.0.tar.gz";
210 210 sha256 = "1y8y0gbgkwimpxqnxq2rm5qz2vy01fvjiybnpm00y5rzd2m34iac";
211 211 };
212 212 meta = {
213 213 license = [ pkgs.lib.licenses.bsdOriginal ];
214 214 };
215 215 };
216 216 "certifi" = super.buildPythonPackage {
217 217 name = "certifi-2020.4.5.1";
218 218 doCheck = false;
219 219 src = fetchurl {
220 220 url = "https://files.pythonhosted.org/packages/b8/e2/a3a86a67c3fc8249ed305fc7b7d290ebe5e4d46ad45573884761ef4dea7b/certifi-2020.4.5.1.tar.gz";
221 221 sha256 = "06b5gfs7wmmipln8f3z928d2mmx2j4b3x7pnqmj6cvmyfh8v7z2i";
222 222 };
223 223 meta = {
224 224 license = [ pkgs.lib.licenses.mpl20 { fullName = "Mozilla Public License 2.0 (MPL 2.0)"; } ];
225 225 };
226 226 };
227 227 "cffi" = super.buildPythonPackage {
228 228 name = "cffi-1.12.3";
229 229 doCheck = false;
230 230 propagatedBuildInputs = [
231 231 self."pycparser"
232 232 ];
233 233 src = fetchurl {
234 234 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
235 235 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
236 236 };
237 237 meta = {
238 238 license = [ pkgs.lib.licenses.mit ];
239 239 };
240 240 };
241 241 "chameleon" = super.buildPythonPackage {
242 242 name = "chameleon-2.24";
243 243 doCheck = false;
244 244 src = fetchurl {
245 245 url = "https://files.pythonhosted.org/packages/5a/9e/637379ffa13c5172b5c0e704833ffea6bf51cec7567f93fd6e903d53ed74/Chameleon-2.24.tar.gz";
246 246 sha256 = "0ykqr7syxfa6h9adjfnsv1gdsca2xzm22vmic8859n0f0j09abj5";
247 247 };
248 248 meta = {
249 249 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
250 250 };
251 251 };
252 252 "channelstream" = super.buildPythonPackage {
253 253 name = "channelstream-0.6.14";
254 254 doCheck = false;
255 255 propagatedBuildInputs = [
256 256 self."gevent"
257 257 self."ws4py"
258 258 self."marshmallow"
259 259 self."python-dateutil"
260 260 self."pyramid"
261 261 self."pyramid-jinja2"
262 262 self."pyramid-apispec"
263 263 self."itsdangerous"
264 264 self."requests"
265 265 self."six"
266 266 ];
267 267 src = fetchurl {
268 268 url = "https://files.pythonhosted.org/packages/d4/2d/86d6757ccd06ce673ee224123471da3d45251d061da7c580bfc259bad853/channelstream-0.6.14.tar.gz";
269 269 sha256 = "0qgy5j3rj6c8cslzidh32glhkrhbbdxjc008y69v8a0y3zyaz2d3";
270 270 };
271 271 meta = {
272 272 license = [ pkgs.lib.licenses.bsdOriginal ];
273 273 };
274 274 };
275 275 "chardet" = super.buildPythonPackage {
276 276 name = "chardet-3.0.4";
277 277 doCheck = false;
278 278 src = fetchurl {
279 279 url = "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz";
280 280 sha256 = "1bpalpia6r5x1kknbk11p1fzph56fmmnp405ds8icksd3knr5aw4";
281 281 };
282 282 meta = {
283 283 license = [ { fullName = "LGPL"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
284 284 };
285 285 };
286 286 "click" = super.buildPythonPackage {
287 287 name = "click-7.0";
288 288 doCheck = false;
289 289 src = fetchurl {
290 290 url = "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz";
291 291 sha256 = "1mzjixd4vjbjvzb6vylki9w1556a9qmdh35kzmq6cign46av952v";
292 292 };
293 293 meta = {
294 294 license = [ pkgs.lib.licenses.bsdOriginal ];
295 295 };
296 296 };
297 297 "colander" = super.buildPythonPackage {
298 298 name = "colander-1.7.0";
299 299 doCheck = false;
300 300 propagatedBuildInputs = [
301 301 self."translationstring"
302 302 self."iso8601"
303 303 self."enum34"
304 304 ];
305 305 src = fetchurl {
306 306 url = "https://files.pythonhosted.org/packages/db/e4/74ab06f54211917b41865cafc987ce511e35503de48da9bfe9358a1bdc3e/colander-1.7.0.tar.gz";
307 307 sha256 = "1wl1bqab307lbbcjx81i28s3yl6dlm4rf15fxawkjb6j48x1cn6p";
308 308 };
309 309 meta = {
310 310 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
311 311 };
312 312 };
313 313 "configobj" = super.buildPythonPackage {
314 314 name = "configobj-5.0.6";
315 315 doCheck = false;
316 316 propagatedBuildInputs = [
317 317 self."six"
318 318 ];
319 319 src = fetchurl {
320 320 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
321 321 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
322 322 };
323 323 meta = {
324 324 license = [ pkgs.lib.licenses.bsdOriginal ];
325 325 };
326 326 };
327 327 "configparser" = super.buildPythonPackage {
328 328 name = "configparser-4.0.2";
329 329 doCheck = false;
330 330 src = fetchurl {
331 331 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
332 332 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
333 333 };
334 334 meta = {
335 335 license = [ pkgs.lib.licenses.mit ];
336 336 };
337 337 };
338 338 "contextlib2" = super.buildPythonPackage {
339 339 name = "contextlib2-0.6.0.post1";
340 340 doCheck = false;
341 341 src = fetchurl {
342 342 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
343 343 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
344 344 };
345 345 meta = {
346 346 license = [ pkgs.lib.licenses.psfl ];
347 347 };
348 348 };
349 349 "cov-core" = super.buildPythonPackage {
350 350 name = "cov-core-1.15.0";
351 351 doCheck = false;
352 352 propagatedBuildInputs = [
353 353 self."coverage"
354 354 ];
355 355 src = fetchurl {
356 356 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
357 357 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
358 358 };
359 359 meta = {
360 360 license = [ pkgs.lib.licenses.mit ];
361 361 };
362 362 };
363 363 "coverage" = super.buildPythonPackage {
364 364 name = "coverage-4.5.4";
365 365 doCheck = false;
366 366 src = fetchurl {
367 367 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
368 368 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
369 369 };
370 370 meta = {
371 371 license = [ pkgs.lib.licenses.asl20 ];
372 372 };
373 373 };
374 374 "cryptography" = super.buildPythonPackage {
375 375 name = "cryptography-2.6.1";
376 376 doCheck = false;
377 377 propagatedBuildInputs = [
378 378 self."asn1crypto"
379 379 self."six"
380 380 self."cffi"
381 381 self."enum34"
382 382 self."ipaddress"
383 383 ];
384 384 src = fetchurl {
385 385 url = "https://files.pythonhosted.org/packages/07/ca/bc827c5e55918ad223d59d299fff92f3563476c3b00d0a9157d9c0217449/cryptography-2.6.1.tar.gz";
386 386 sha256 = "19iwz5avym5zl6jrrrkym1rdaa9h61j20ph4cswsqgv8xg5j3j16";
387 387 };
388 388 meta = {
389 389 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
390 390 };
391 391 };
392 392 "cssselect" = super.buildPythonPackage {
393 393 name = "cssselect-1.0.3";
394 394 doCheck = false;
395 395 src = fetchurl {
396 396 url = "https://files.pythonhosted.org/packages/52/ea/f31e1d2e9eb130fda2a631e22eac369dc644e8807345fbed5113f2d6f92b/cssselect-1.0.3.tar.gz";
397 397 sha256 = "011jqa2jhmydhi0iz4v1w3cr540z5zas8g2bw8brdw4s4b2qnv86";
398 398 };
399 399 meta = {
400 400 license = [ pkgs.lib.licenses.bsdOriginal ];
401 401 };
402 402 };
403 403 "cssutils" = super.buildPythonPackage {
404 404 name = "cssutils-1.0.2";
405 405 doCheck = false;
406 406 src = fetchurl {
407 407 url = "https://files.pythonhosted.org/packages/5c/0b/c5f29d29c037e97043770b5e7c740b6252993e4b57f029b3cd03c78ddfec/cssutils-1.0.2.tar.gz";
408 408 sha256 = "1bxchrbqzapwijap0yhlxdil1w9bmwvgx77aizlkhc2mcxjg1z52";
409 409 };
410 410 meta = {
411 411 license = [ { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL 2.1 or later, see also http://cthedot.de/cssutils/"; } ];
412 412 };
413 413 };
414 414 "decorator" = super.buildPythonPackage {
415 415 name = "decorator-4.1.2";
416 416 doCheck = false;
417 417 src = fetchurl {
418 418 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
419 419 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
420 420 };
421 421 meta = {
422 422 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
423 423 };
424 424 };
425 425 "deform" = super.buildPythonPackage {
426 426 name = "deform-2.0.8";
427 427 doCheck = false;
428 428 propagatedBuildInputs = [
429 429 self."chameleon"
430 430 self."colander"
431 431 self."iso8601"
432 432 self."peppercorn"
433 433 self."translationstring"
434 434 self."zope.deprecation"
435 435 ];
436 436 src = fetchurl {
437 437 url = "https://files.pythonhosted.org/packages/21/d0/45fdf891a82722c02fc2da319cf2d1ae6b5abf9e470ad3762135a895a868/deform-2.0.8.tar.gz";
438 438 sha256 = "0wbjv98sib96649aqaygzxnrkclyy50qij2rha6fn1i4c86bfdl9";
439 439 };
440 440 meta = {
441 441 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
442 442 };
443 443 };
444 444 "defusedxml" = super.buildPythonPackage {
445 445 name = "defusedxml-0.6.0";
446 446 doCheck = false;
447 447 src = fetchurl {
448 448 url = "https://files.pythonhosted.org/packages/a4/5f/f8aa58ca0cf01cbcee728abc9d88bfeb74e95e6cb4334cfd5bed5673ea77/defusedxml-0.6.0.tar.gz";
449 449 sha256 = "1xbp8fivl3wlbyg2jrvs4lalaqv1xp9a9f29p75wdx2s2d6h717n";
450 450 };
451 451 meta = {
452 452 license = [ pkgs.lib.licenses.psfl ];
453 453 };
454 454 };
455 455 "dm.xmlsec.binding" = super.buildPythonPackage {
456 456 name = "dm.xmlsec.binding-1.3.7";
457 457 doCheck = false;
458 458 propagatedBuildInputs = [
459 459 self."setuptools"
460 460 self."lxml"
461 461 ];
462 462 src = fetchurl {
463 463 url = "https://files.pythonhosted.org/packages/2c/9e/7651982d50252692991acdae614af821fd6c79bc8dcd598ad71d55be8fc7/dm.xmlsec.binding-1.3.7.tar.gz";
464 464 sha256 = "03jjjscx1pz2nc0dwiw9nia02qbz1c6f0f9zkyr8fmvys2n5jkb3";
465 465 };
466 466 meta = {
467 467 license = [ pkgs.lib.licenses.bsdOriginal ];
468 468 };
469 469 };
470 470 "docutils" = super.buildPythonPackage {
471 471 name = "docutils-0.16";
472 472 doCheck = false;
473 473 src = fetchurl {
474 474 url = "https://files.pythonhosted.org/packages/2f/e0/3d435b34abd2d62e8206171892f174b180cd37b09d57b924ca5c2ef2219d/docutils-0.16.tar.gz";
475 475 sha256 = "1z3qliszqca9m719q3qhdkh0ghh90g500avzdgi7pl77x5h3mpn2";
476 476 };
477 477 meta = {
478 478 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.publicDomain pkgs.lib.licenses.gpl1 { fullName = "public domain, Python, 2-Clause BSD, GPL 3 (see COPYING.txt)"; } pkgs.lib.licenses.psfl ];
479 479 };
480 480 };
481 481 "dogpile.cache" = super.buildPythonPackage {
482 482 name = "dogpile.cache-0.9.0";
483 483 doCheck = false;
484 484 propagatedBuildInputs = [
485 485 self."decorator"
486 486 ];
487 487 src = fetchurl {
488 488 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
489 489 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
490 490 };
491 491 meta = {
492 492 license = [ pkgs.lib.licenses.bsdOriginal ];
493 493 };
494 494 };
495 495 "dogpile.core" = super.buildPythonPackage {
496 496 name = "dogpile.core-0.4.1";
497 497 doCheck = false;
498 498 src = fetchurl {
499 499 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
500 500 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
501 501 };
502 502 meta = {
503 503 license = [ pkgs.lib.licenses.bsdOriginal ];
504 504 };
505 505 };
506 506 "ecdsa" = super.buildPythonPackage {
507 507 name = "ecdsa-0.13.2";
508 508 doCheck = false;
509 509 src = fetchurl {
510 510 url = "https://files.pythonhosted.org/packages/51/76/139bf6e9b7b6684d5891212cdbd9e0739f2bfc03f380a1a6ffa700f392ac/ecdsa-0.13.2.tar.gz";
511 511 sha256 = "116qaq7bh4lcynzi613960jhsnn19v0kmsqwahiwjfj14gx4y0sw";
512 512 };
513 513 meta = {
514 514 license = [ pkgs.lib.licenses.mit ];
515 515 };
516 516 };
517 517 "elasticsearch" = super.buildPythonPackage {
518 518 name = "elasticsearch-6.3.1";
519 519 doCheck = false;
520 520 propagatedBuildInputs = [
521 521 self."urllib3"
522 522 ];
523 523 src = fetchurl {
524 524 url = "https://files.pythonhosted.org/packages/9d/ce/c4664e8380e379a9402ecfbaf158e56396da90d520daba21cfa840e0eb71/elasticsearch-6.3.1.tar.gz";
525 525 sha256 = "12y93v0yn7a4xmf969239g8gb3l4cdkclfpbk1qc8hx5qkymrnma";
526 526 };
527 527 meta = {
528 528 license = [ pkgs.lib.licenses.asl20 ];
529 529 };
530 530 };
531 531 "elasticsearch-dsl" = super.buildPythonPackage {
532 532 name = "elasticsearch-dsl-6.3.1";
533 533 doCheck = false;
534 534 propagatedBuildInputs = [
535 535 self."six"
536 536 self."python-dateutil"
537 537 self."elasticsearch"
538 538 self."ipaddress"
539 539 ];
540 540 src = fetchurl {
541 541 url = "https://files.pythonhosted.org/packages/4c/0d/1549f50c591db6bb4e66cbcc8d34a6e537c3d89aa426b167c244fd46420a/elasticsearch-dsl-6.3.1.tar.gz";
542 542 sha256 = "1gh8a0shqi105k325hgwb9avrpdjh0mc6mxwfg9ba7g6lssb702z";
543 543 };
544 544 meta = {
545 545 license = [ pkgs.lib.licenses.asl20 ];
546 546 };
547 547 };
548 548 "elasticsearch1" = super.buildPythonPackage {
549 549 name = "elasticsearch1-1.10.0";
550 550 doCheck = false;
551 551 propagatedBuildInputs = [
552 552 self."urllib3"
553 553 ];
554 554 src = fetchurl {
555 555 url = "https://files.pythonhosted.org/packages/a6/eb/73e75f9681fa71e3157b8ee878534235d57f24ee64f0e77f8d995fb57076/elasticsearch1-1.10.0.tar.gz";
556 556 sha256 = "0g89444kd5zwql4vbvyrmi2m6l6dcj6ga98j4hqxyyyz6z20aki2";
557 557 };
558 558 meta = {
559 559 license = [ pkgs.lib.licenses.asl20 ];
560 560 };
561 561 };
562 562 "elasticsearch1-dsl" = super.buildPythonPackage {
563 563 name = "elasticsearch1-dsl-0.0.12";
564 564 doCheck = false;
565 565 propagatedBuildInputs = [
566 566 self."six"
567 567 self."python-dateutil"
568 568 self."elasticsearch1"
569 569 ];
570 570 src = fetchurl {
571 571 url = "https://files.pythonhosted.org/packages/eb/9d/785342775cb10eddc9b8d7457d618a423b4f0b89d8b2b2d1bc27190d71db/elasticsearch1-dsl-0.0.12.tar.gz";
572 572 sha256 = "0ig1ly39v93hba0z975wnhbmzwj28w6w1sqlr2g7cn5spp732bhk";
573 573 };
574 574 meta = {
575 575 license = [ pkgs.lib.licenses.asl20 ];
576 576 };
577 577 };
578 578 "elasticsearch2" = super.buildPythonPackage {
579 579 name = "elasticsearch2-2.5.1";
580 580 doCheck = false;
581 581 propagatedBuildInputs = [
582 582 self."urllib3"
583 583 ];
584 584 src = fetchurl {
585 585 url = "https://files.pythonhosted.org/packages/f6/09/f9b24aa6b1120bea371cd57ef6f57c7694cf16660469456a8be6c2bdbe22/elasticsearch2-2.5.1.tar.gz";
586 586 sha256 = "19k2znpjfyp0hrq73cz7pjyj289040xpsxsm0xhh4jfh6y551g7k";
587 587 };
588 588 meta = {
589 589 license = [ pkgs.lib.licenses.asl20 ];
590 590 };
591 591 };
592 592 "entrypoints" = super.buildPythonPackage {
593 593 name = "entrypoints-0.2.2";
594 594 doCheck = false;
595 595 propagatedBuildInputs = [
596 596 self."configparser"
597 597 ];
598 598 src = fetchurl {
599 599 url = "https://code.rhodecode.com/upstream/entrypoints/artifacts/download/0-8e9ee9e4-c4db-409c-b07e-81568fd1832d.tar.gz?md5=3a027b8ff1d257b91fe257de6c43357d";
600 600 sha256 = "0qih72n2myclanplqipqxpgpj9d2yhff1pz5d02zq1cfqyd173w5";
601 601 };
602 602 meta = {
603 603 license = [ pkgs.lib.licenses.mit ];
604 604 };
605 605 };
606 606 "enum34" = super.buildPythonPackage {
607 607 name = "enum34-1.1.10";
608 608 doCheck = false;
609 609 src = fetchurl {
610 610 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
611 611 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
612 612 };
613 613 meta = {
614 614 license = [ pkgs.lib.licenses.bsdOriginal ];
615 615 };
616 616 };
617 617 "formencode" = super.buildPythonPackage {
618 618 name = "formencode-1.2.4";
619 619 doCheck = false;
620 620 src = fetchurl {
621 621 url = "https://files.pythonhosted.org/packages/8e/59/0174271a6f004512e0201188593e6d319db139d14cb7490e488bbb078015/FormEncode-1.2.4.tar.gz";
622 622 sha256 = "1fgy04sdy4yry5xcjls3x3xy30dqwj58ycnkndim819jx0788w42";
623 623 };
624 624 meta = {
625 625 license = [ pkgs.lib.licenses.psfl ];
626 626 };
627 627 };
628 628 "funcsigs" = super.buildPythonPackage {
629 629 name = "funcsigs-1.0.2";
630 630 doCheck = false;
631 631 src = fetchurl {
632 632 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
633 633 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
634 634 };
635 635 meta = {
636 636 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
637 637 };
638 638 };
639 639 "functools32" = super.buildPythonPackage {
640 640 name = "functools32-3.2.3.post2";
641 641 doCheck = false;
642 642 src = fetchurl {
643 643 url = "https://files.pythonhosted.org/packages/c5/60/6ac26ad05857c601308d8fb9e87fa36d0ebf889423f47c3502ef034365db/functools32-3.2.3-2.tar.gz";
644 644 sha256 = "0v8ya0b58x47wp216n1zamimv4iw57cxz3xxhzix52jkw3xks9gn";
645 645 };
646 646 meta = {
647 647 license = [ pkgs.lib.licenses.psfl ];
648 648 };
649 649 };
650 650 "future" = super.buildPythonPackage {
651 651 name = "future-0.14.3";
652 652 doCheck = false;
653 653 src = fetchurl {
654 654 url = "https://files.pythonhosted.org/packages/83/80/8ef3a11a15f8eaafafa0937b20c1b3f73527e69ab6b3fa1cf94a5a96aabb/future-0.14.3.tar.gz";
655 655 sha256 = "1savk7jx7hal032f522c5ajhh8fra6gmnadrj9adv5qxi18pv1b2";
656 656 };
657 657 meta = {
658 658 license = [ { fullName = "OSI Approved"; } pkgs.lib.licenses.mit ];
659 659 };
660 660 };
661 661 "futures" = super.buildPythonPackage {
662 662 name = "futures-3.0.2";
663 663 doCheck = false;
664 664 src = fetchurl {
665 665 url = "https://files.pythonhosted.org/packages/f8/e7/fc0fcbeb9193ba2d4de00b065e7fd5aecd0679e93ce95a07322b2b1434f4/futures-3.0.2.tar.gz";
666 666 sha256 = "0mz2pbgxbc2nbib1szifi07whjbfs4r02pv2z390z7p410awjgyw";
667 667 };
668 668 meta = {
669 669 license = [ pkgs.lib.licenses.bsdOriginal ];
670 670 };
671 671 };
672 672 "gevent" = super.buildPythonPackage {
673 673 name = "gevent-1.5.0";
674 674 doCheck = false;
675 675 propagatedBuildInputs = [
676 676 self."greenlet"
677 677 ];
678 678 src = fetchurl {
679 679 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
680 680 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
681 681 };
682 682 meta = {
683 683 license = [ pkgs.lib.licenses.mit ];
684 684 };
685 685 };
686 686 "gnureadline" = super.buildPythonPackage {
687 687 name = "gnureadline-6.3.8";
688 688 doCheck = false;
689 689 src = fetchurl {
690 690 url = "https://files.pythonhosted.org/packages/50/64/86085c823cd78f9df9d8e33dce0baa71618016f8860460b82cf6610e1eb3/gnureadline-6.3.8.tar.gz";
691 691 sha256 = "0ddhj98x2nv45iz4aadk4b9m0b1kpsn1xhcbypn5cd556knhiqjq";
692 692 };
693 693 meta = {
694 694 license = [ { fullName = "GNU General Public License v3 (GPLv3)"; } pkgs.lib.licenses.gpl1 ];
695 695 };
696 696 };
697 697 "gprof2dot" = super.buildPythonPackage {
698 698 name = "gprof2dot-2017.9.19";
699 699 doCheck = false;
700 700 src = fetchurl {
701 701 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
702 702 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
703 703 };
704 704 meta = {
705 705 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
706 706 };
707 707 };
708 708 "greenlet" = super.buildPythonPackage {
709 709 name = "greenlet-0.4.15";
710 710 doCheck = false;
711 711 src = fetchurl {
712 712 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
713 713 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
714 714 };
715 715 meta = {
716 716 license = [ pkgs.lib.licenses.mit ];
717 717 };
718 718 };
719 719 "gunicorn" = super.buildPythonPackage {
720 720 name = "gunicorn-19.9.0";
721 721 doCheck = false;
722 722 src = fetchurl {
723 723 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
724 724 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
725 725 };
726 726 meta = {
727 727 license = [ pkgs.lib.licenses.mit ];
728 728 };
729 729 };
730 730 "hupper" = super.buildPythonPackage {
731 731 name = "hupper-1.10.2";
732 732 doCheck = false;
733 733 src = fetchurl {
734 734 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
735 735 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
736 736 };
737 737 meta = {
738 738 license = [ pkgs.lib.licenses.mit ];
739 739 };
740 740 };
741 741 "idna" = super.buildPythonPackage {
742 742 name = "idna-2.8";
743 743 doCheck = false;
744 744 src = fetchurl {
745 745 url = "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz";
746 746 sha256 = "01rlkigdxg17sf9yar1jl8n18ls59367wqh59hnawlyg53vb6my3";
747 747 };
748 748 meta = {
749 749 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD-like"; } ];
750 750 };
751 751 };
752 752 "importlib-metadata" = super.buildPythonPackage {
753 753 name = "importlib-metadata-1.6.0";
754 754 doCheck = false;
755 755 propagatedBuildInputs = [
756 756 self."zipp"
757 757 self."pathlib2"
758 758 self."contextlib2"
759 759 self."configparser"
760 760 ];
761 761 src = fetchurl {
762 762 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
763 763 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
764 764 };
765 765 meta = {
766 766 license = [ pkgs.lib.licenses.asl20 ];
767 767 };
768 768 };
769 769 "infrae.cache" = super.buildPythonPackage {
770 770 name = "infrae.cache-1.0.1";
771 771 doCheck = false;
772 772 propagatedBuildInputs = [
773 773 self."beaker"
774 774 self."repoze.lru"
775 775 ];
776 776 src = fetchurl {
777 777 url = "https://files.pythonhosted.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
778 778 sha256 = "1dvqsjn8vw253wz9d1pz17j79mf4bs53dvp2qxck2qdp1am1njw4";
779 779 };
780 780 meta = {
781 781 license = [ pkgs.lib.licenses.zpl21 ];
782 782 };
783 783 };
784 784 "invoke" = super.buildPythonPackage {
785 785 name = "invoke-0.13.0";
786 786 doCheck = false;
787 787 src = fetchurl {
788 788 url = "https://files.pythonhosted.org/packages/47/bf/d07ef52fa1ac645468858bbac7cb95b246a972a045e821493d17d89c81be/invoke-0.13.0.tar.gz";
789 789 sha256 = "0794vhgxfmkh0vzkkg5cfv1w82g3jc3xr18wim29far9qpx9468s";
790 790 };
791 791 meta = {
792 792 license = [ pkgs.lib.licenses.bsdOriginal ];
793 793 };
794 794 };
795 795 "ipaddress" = super.buildPythonPackage {
796 796 name = "ipaddress-1.0.23";
797 797 doCheck = false;
798 798 src = fetchurl {
799 799 url = "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz";
800 800 sha256 = "1qp743h30s04m3cg3yk3fycad930jv17q7dsslj4mfw0jlvf1y5p";
801 801 };
802 802 meta = {
803 803 license = [ pkgs.lib.licenses.psfl ];
804 804 };
805 805 };
806 806 "ipdb" = super.buildPythonPackage {
807 807 name = "ipdb-0.13.2";
808 808 doCheck = false;
809 809 propagatedBuildInputs = [
810 810 self."setuptools"
811 811 self."ipython"
812 812 ];
813 813 src = fetchurl {
814 814 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
815 815 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
816 816 };
817 817 meta = {
818 818 license = [ pkgs.lib.licenses.bsdOriginal ];
819 819 };
820 820 };
821 821 "ipython" = super.buildPythonPackage {
822 822 name = "ipython-5.1.0";
823 823 doCheck = false;
824 824 propagatedBuildInputs = [
825 825 self."setuptools"
826 826 self."decorator"
827 827 self."pickleshare"
828 828 self."simplegeneric"
829 829 self."traitlets"
830 830 self."prompt-toolkit"
831 831 self."pygments"
832 832 self."pexpect"
833 833 self."backports.shutil-get-terminal-size"
834 834 self."pathlib2"
835 835 self."pexpect"
836 836 ];
837 837 src = fetchurl {
838 838 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
839 839 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
840 840 };
841 841 meta = {
842 842 license = [ pkgs.lib.licenses.bsdOriginal ];
843 843 };
844 844 };
845 845 "ipython-genutils" = super.buildPythonPackage {
846 846 name = "ipython-genutils-0.2.0";
847 847 doCheck = false;
848 848 src = fetchurl {
849 849 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
850 850 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
851 851 };
852 852 meta = {
853 853 license = [ pkgs.lib.licenses.bsdOriginal ];
854 854 };
855 855 };
856 856 "iso8601" = super.buildPythonPackage {
857 857 name = "iso8601-0.1.12";
858 858 doCheck = false;
859 859 src = fetchurl {
860 860 url = "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz";
861 861 sha256 = "10nyvvnrhw2w3p09v1ica4lgj6f4g9j3kkfx17qmraiq3w7b5i29";
862 862 };
863 863 meta = {
864 864 license = [ pkgs.lib.licenses.mit ];
865 865 };
866 866 };
867 867 "isodate" = super.buildPythonPackage {
868 868 name = "isodate-0.6.0";
869 869 doCheck = false;
870 870 propagatedBuildInputs = [
871 871 self."six"
872 872 ];
873 873 src = fetchurl {
874 874 url = "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz";
875 875 sha256 = "1n7jkz68kk5pwni540pr5zdh99bf6ywydk1p5pdrqisrawylldif";
876 876 };
877 877 meta = {
878 878 license = [ pkgs.lib.licenses.bsdOriginal ];
879 879 };
880 880 };
881 881 "itsdangerous" = super.buildPythonPackage {
882 882 name = "itsdangerous-1.1.0";
883 883 doCheck = false;
884 884 src = fetchurl {
885 885 url = "https://files.pythonhosted.org/packages/68/1a/f27de07a8a304ad5fa817bbe383d1238ac4396da447fa11ed937039fa04b/itsdangerous-1.1.0.tar.gz";
886 886 sha256 = "068zpbksq5q2z4dckh2k1zbcq43ay74ylqn77rni797j0wyh66rj";
887 887 };
888 888 meta = {
889 889 license = [ pkgs.lib.licenses.bsdOriginal ];
890 890 };
891 891 };
892 892 "jinja2" = super.buildPythonPackage {
893 893 name = "jinja2-2.9.6";
894 894 doCheck = false;
895 895 propagatedBuildInputs = [
896 896 self."markupsafe"
897 897 ];
898 898 src = fetchurl {
899 899 url = "https://files.pythonhosted.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
900 900 sha256 = "1zzrkywhziqffrzks14kzixz7nd4yh2vc0fb04a68vfd2ai03anx";
901 901 };
902 902 meta = {
903 903 license = [ pkgs.lib.licenses.bsdOriginal ];
904 904 };
905 905 };
906 906 "jsonschema" = super.buildPythonPackage {
907 907 name = "jsonschema-2.6.0";
908 908 doCheck = false;
909 909 propagatedBuildInputs = [
910 910 self."functools32"
911 911 ];
912 912 src = fetchurl {
913 913 url = "https://files.pythonhosted.org/packages/58/b9/171dbb07e18c6346090a37f03c7e74410a1a56123f847efed59af260a298/jsonschema-2.6.0.tar.gz";
914 914 sha256 = "00kf3zmpp9ya4sydffpifn0j0mzm342a2vzh82p6r0vh10cg7xbg";
915 915 };
916 916 meta = {
917 917 license = [ pkgs.lib.licenses.mit ];
918 918 };
919 919 };
920 920 "jupyter-client" = super.buildPythonPackage {
921 921 name = "jupyter-client-5.0.0";
922 922 doCheck = false;
923 923 propagatedBuildInputs = [
924 924 self."traitlets"
925 925 self."jupyter-core"
926 926 self."pyzmq"
927 927 self."python-dateutil"
928 928 ];
929 929 src = fetchurl {
930 930 url = "https://files.pythonhosted.org/packages/e5/6f/65412ed462202b90134b7e761b0b7e7f949e07a549c1755475333727b3d0/jupyter_client-5.0.0.tar.gz";
931 931 sha256 = "0nxw4rqk4wsjhc87gjqd7pv89cb9dnimcfnmcmp85bmrvv1gjri7";
932 932 };
933 933 meta = {
934 934 license = [ pkgs.lib.licenses.bsdOriginal ];
935 935 };
936 936 };
937 937 "jupyter-core" = super.buildPythonPackage {
938 938 name = "jupyter-core-4.5.0";
939 939 doCheck = false;
940 940 propagatedBuildInputs = [
941 941 self."traitlets"
942 942 ];
943 943 src = fetchurl {
944 944 url = "https://files.pythonhosted.org/packages/4a/de/ff4ca734656d17ebe0450807b59d728f45277e2e7f4b82bc9aae6cb82961/jupyter_core-4.5.0.tar.gz";
945 945 sha256 = "1xr4pbghwk5hayn5wwnhb7z95380r45p79gf5if5pi1akwg7qvic";
946 946 };
947 947 meta = {
948 948 license = [ pkgs.lib.licenses.bsdOriginal ];
949 949 };
950 950 };
951 951 "kombu" = super.buildPythonPackage {
952 952 name = "kombu-4.6.6";
953 953 doCheck = false;
954 954 propagatedBuildInputs = [
955 955 self."amqp"
956 956 self."importlib-metadata"
957 957 ];
958 958 src = fetchurl {
959 959 url = "https://files.pythonhosted.org/packages/20/e6/bc2d9affba6138a1dc143f77fef253e9e08e238fa7c0688d917c09005e96/kombu-4.6.6.tar.gz";
960 960 sha256 = "11mxpcy8mg1l35bgbhba70v29bydr2hrhdbdlb4lg98m3m5vaq0p";
961 961 };
962 962 meta = {
963 963 license = [ pkgs.lib.licenses.bsdOriginal ];
964 964 };
965 965 };
966 966 "lxml" = super.buildPythonPackage {
967 967 name = "lxml-4.2.5";
968 968 doCheck = false;
969 969 src = fetchurl {
970 970 url = "https://files.pythonhosted.org/packages/4b/20/ddf5eb3bd5c57582d2b4652b4bbcf8da301bdfe5d805cb94e805f4d7464d/lxml-4.2.5.tar.gz";
971 971 sha256 = "0zw0y9hs0nflxhl9cs6ipwwh53szi3w2x06wl0k9cylyqac0cwin";
972 972 };
973 973 meta = {
974 974 license = [ pkgs.lib.licenses.bsdOriginal ];
975 975 };
976 976 };
977 977 "mako" = super.buildPythonPackage {
978 978 name = "mako-1.1.0";
979 979 doCheck = false;
980 980 propagatedBuildInputs = [
981 981 self."markupsafe"
982 982 ];
983 983 src = fetchurl {
984 984 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
985 985 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
986 986 };
987 987 meta = {
988 988 license = [ pkgs.lib.licenses.mit ];
989 989 };
990 990 };
991 991 "markdown" = super.buildPythonPackage {
992 992 name = "markdown-2.6.11";
993 993 doCheck = false;
994 994 src = fetchurl {
995 995 url = "https://files.pythonhosted.org/packages/b3/73/fc5c850f44af5889192dff783b7b0d8f3fe8d30b65c8e3f78f8f0265fecf/Markdown-2.6.11.tar.gz";
996 996 sha256 = "108g80ryzykh8bj0i7jfp71510wrcixdi771lf2asyghgyf8cmm8";
997 997 };
998 998 meta = {
999 999 license = [ pkgs.lib.licenses.bsdOriginal ];
1000 1000 };
1001 1001 };
1002 1002 "markupsafe" = super.buildPythonPackage {
1003 1003 name = "markupsafe-1.1.1";
1004 1004 doCheck = false;
1005 1005 src = fetchurl {
1006 1006 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
1007 1007 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
1008 1008 };
1009 1009 meta = {
1010 1010 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
1011 1011 };
1012 1012 };
1013 1013 "marshmallow" = super.buildPythonPackage {
1014 1014 name = "marshmallow-2.18.0";
1015 1015 doCheck = false;
1016 1016 src = fetchurl {
1017 1017 url = "https://files.pythonhosted.org/packages/ad/0b/5799965d1c6d5f608d684e2c0dce8a828e0309a3bfe8327d9418a89f591c/marshmallow-2.18.0.tar.gz";
1018 1018 sha256 = "1g0aafpjn7yaxq06yndy8c7rs9n42adxkqq1ayhlr869pr06d3lm";
1019 1019 };
1020 1020 meta = {
1021 1021 license = [ pkgs.lib.licenses.mit ];
1022 1022 };
1023 1023 };
1024 1024 "mistune" = super.buildPythonPackage {
1025 1025 name = "mistune-0.8.4";
1026 1026 doCheck = false;
1027 1027 src = fetchurl {
1028 1028 url = "https://files.pythonhosted.org/packages/2d/a4/509f6e7783ddd35482feda27bc7f72e65b5e7dc910eca4ab2164daf9c577/mistune-0.8.4.tar.gz";
1029 1029 sha256 = "0vkmsh0x480rni51lhyvigfdf06b9247z868pk3bal1wnnfl58sr";
1030 1030 };
1031 1031 meta = {
1032 1032 license = [ pkgs.lib.licenses.bsdOriginal ];
1033 1033 };
1034 1034 };
1035 1035 "mock" = super.buildPythonPackage {
1036 1036 name = "mock-3.0.5";
1037 1037 doCheck = false;
1038 1038 propagatedBuildInputs = [
1039 1039 self."six"
1040 1040 self."funcsigs"
1041 1041 ];
1042 1042 src = fetchurl {
1043 1043 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
1044 1044 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
1045 1045 };
1046 1046 meta = {
1047 1047 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
1048 1048 };
1049 1049 };
1050 1050 "more-itertools" = super.buildPythonPackage {
1051 1051 name = "more-itertools-5.0.0";
1052 1052 doCheck = false;
1053 1053 propagatedBuildInputs = [
1054 1054 self."six"
1055 1055 ];
1056 1056 src = fetchurl {
1057 1057 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
1058 1058 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
1059 1059 };
1060 1060 meta = {
1061 1061 license = [ pkgs.lib.licenses.mit ];
1062 1062 };
1063 1063 };
1064 1064 "msgpack-python" = super.buildPythonPackage {
1065 1065 name = "msgpack-python-0.5.6";
1066 1066 doCheck = false;
1067 1067 src = fetchurl {
1068 1068 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
1069 1069 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
1070 1070 };
1071 1071 meta = {
1072 1072 license = [ pkgs.lib.licenses.asl20 ];
1073 1073 };
1074 1074 };
1075 1075 "mysql-python" = super.buildPythonPackage {
1076 1076 name = "mysql-python-1.2.5";
1077 1077 doCheck = false;
1078 1078 src = fetchurl {
1079 1079 url = "https://files.pythonhosted.org/packages/a5/e9/51b544da85a36a68debe7a7091f068d802fc515a3a202652828c73453cad/MySQL-python-1.2.5.zip";
1080 1080 sha256 = "0x0c2jg0bb3pp84njaqiic050qkyd7ymwhfvhipnimg58yv40441";
1081 1081 };
1082 1082 meta = {
1083 1083 license = [ pkgs.lib.licenses.gpl1 ];
1084 1084 };
1085 1085 };
1086 1086 "nbconvert" = super.buildPythonPackage {
1087 1087 name = "nbconvert-5.3.1";
1088 1088 doCheck = false;
1089 1089 propagatedBuildInputs = [
1090 1090 self."mistune"
1091 1091 self."jinja2"
1092 1092 self."pygments"
1093 1093 self."traitlets"
1094 1094 self."jupyter-core"
1095 1095 self."nbformat"
1096 1096 self."entrypoints"
1097 1097 self."bleach"
1098 1098 self."pandocfilters"
1099 1099 self."testpath"
1100 1100 ];
1101 1101 src = fetchurl {
1102 1102 url = "https://files.pythonhosted.org/packages/b9/a4/d0a0938ad6f5eeb4dea4e73d255c617ef94b0b2849d51194c9bbdb838412/nbconvert-5.3.1.tar.gz";
1103 1103 sha256 = "1f9dkvpx186xjm4xab0qbph588mncp4vqk3fmxrsnqs43mks9c8j";
1104 1104 };
1105 1105 meta = {
1106 1106 license = [ pkgs.lib.licenses.bsdOriginal ];
1107 1107 };
1108 1108 };
1109 1109 "nbformat" = super.buildPythonPackage {
1110 1110 name = "nbformat-4.4.0";
1111 1111 doCheck = false;
1112 1112 propagatedBuildInputs = [
1113 1113 self."ipython-genutils"
1114 1114 self."traitlets"
1115 1115 self."jsonschema"
1116 1116 self."jupyter-core"
1117 1117 ];
1118 1118 src = fetchurl {
1119 1119 url = "https://files.pythonhosted.org/packages/6e/0e/160754f7ae3e984863f585a3743b0ed1702043a81245907c8fae2d537155/nbformat-4.4.0.tar.gz";
1120 1120 sha256 = "00nlf08h8yc4q73nphfvfhxrcnilaqanb8z0mdy6nxk0vzq4wjgp";
1121 1121 };
1122 1122 meta = {
1123 1123 license = [ pkgs.lib.licenses.bsdOriginal ];
1124 1124 };
1125 1125 };
1126 1126 "packaging" = super.buildPythonPackage {
1127 1127 name = "packaging-20.3";
1128 1128 doCheck = false;
1129 1129 propagatedBuildInputs = [
1130 1130 self."pyparsing"
1131 1131 self."six"
1132 1132 ];
1133 1133 src = fetchurl {
1134 1134 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
1135 1135 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
1136 1136 };
1137 1137 meta = {
1138 1138 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
1139 1139 };
1140 1140 };
1141 1141 "pandocfilters" = super.buildPythonPackage {
1142 1142 name = "pandocfilters-1.4.2";
1143 1143 doCheck = false;
1144 1144 src = fetchurl {
1145 1145 url = "https://files.pythonhosted.org/packages/4c/ea/236e2584af67bb6df960832731a6e5325fd4441de001767da328c33368ce/pandocfilters-1.4.2.tar.gz";
1146 1146 sha256 = "1a8d9b7s48gmq9zj0pmbyv2sivn5i7m6mybgpkk4jm5vd7hp1pdk";
1147 1147 };
1148 1148 meta = {
1149 1149 license = [ pkgs.lib.licenses.bsdOriginal ];
1150 1150 };
1151 1151 };
1152 1152 "paste" = super.buildPythonPackage {
1153 1153 name = "paste-3.4.0";
1154 1154 doCheck = false;
1155 1155 propagatedBuildInputs = [
1156 1156 self."six"
1157 1157 ];
1158 1158 src = fetchurl {
1159 1159 url = "https://files.pythonhosted.org/packages/79/4a/45821b71dd40000507549afd1491546afad8279c0a87527c88776a794158/Paste-3.4.0.tar.gz";
1160 1160 sha256 = "16sichvhyci1gaarkjs35mai8vphh7b244qm14hj1isw38nx4c03";
1161 1161 };
1162 1162 meta = {
1163 1163 license = [ pkgs.lib.licenses.mit ];
1164 1164 };
1165 1165 };
1166 1166 "pastedeploy" = super.buildPythonPackage {
1167 1167 name = "pastedeploy-2.1.0";
1168 1168 doCheck = false;
1169 1169 src = fetchurl {
1170 1170 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
1171 1171 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
1172 1172 };
1173 1173 meta = {
1174 1174 license = [ pkgs.lib.licenses.mit ];
1175 1175 };
1176 1176 };
1177 1177 "pastescript" = super.buildPythonPackage {
1178 1178 name = "pastescript-3.2.0";
1179 1179 doCheck = false;
1180 1180 propagatedBuildInputs = [
1181 1181 self."paste"
1182 1182 self."pastedeploy"
1183 1183 self."six"
1184 1184 ];
1185 1185 src = fetchurl {
1186 1186 url = "https://files.pythonhosted.org/packages/ff/47/45c6f5a3cb8f5abf786fea98dbb8d02400a55768a9b623afb7df12346c61/PasteScript-3.2.0.tar.gz";
1187 1187 sha256 = "1b3jq7xh383nvrrlblk05m37345bv97xrhx77wshllba3h7mq3wv";
1188 1188 };
1189 1189 meta = {
1190 1190 license = [ pkgs.lib.licenses.mit ];
1191 1191 };
1192 1192 };
1193 1193 "pathlib2" = super.buildPythonPackage {
1194 1194 name = "pathlib2-2.3.5";
1195 1195 doCheck = false;
1196 1196 propagatedBuildInputs = [
1197 1197 self."six"
1198 1198 self."scandir"
1199 1199 ];
1200 1200 src = fetchurl {
1201 1201 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
1202 1202 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
1203 1203 };
1204 1204 meta = {
1205 1205 license = [ pkgs.lib.licenses.mit ];
1206 1206 };
1207 1207 };
1208 1208 "peppercorn" = super.buildPythonPackage {
1209 1209 name = "peppercorn-0.6";
1210 1210 doCheck = false;
1211 1211 src = fetchurl {
1212 1212 url = "https://files.pythonhosted.org/packages/e4/77/93085de7108cdf1a0b092ff443872a8f9442c736d7ddebdf2f27627935f4/peppercorn-0.6.tar.gz";
1213 1213 sha256 = "1ip4bfwcpwkq9hz2dai14k2cyabvwrnvcvrcmzxmqm04g8fnimwn";
1214 1214 };
1215 1215 meta = {
1216 1216 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1217 1217 };
1218 1218 };
1219 1219 "pexpect" = super.buildPythonPackage {
1220 1220 name = "pexpect-4.8.0";
1221 1221 doCheck = false;
1222 1222 propagatedBuildInputs = [
1223 1223 self."ptyprocess"
1224 1224 ];
1225 1225 src = fetchurl {
1226 1226 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
1227 1227 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
1228 1228 };
1229 1229 meta = {
1230 1230 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
1231 1231 };
1232 1232 };
1233 1233 "pickleshare" = super.buildPythonPackage {
1234 1234 name = "pickleshare-0.7.5";
1235 1235 doCheck = false;
1236 1236 propagatedBuildInputs = [
1237 1237 self."pathlib2"
1238 1238 ];
1239 1239 src = fetchurl {
1240 1240 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
1241 1241 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
1242 1242 };
1243 1243 meta = {
1244 1244 license = [ pkgs.lib.licenses.mit ];
1245 1245 };
1246 1246 };
1247 1247 "plaster" = super.buildPythonPackage {
1248 1248 name = "plaster-1.0";
1249 1249 doCheck = false;
1250 1250 propagatedBuildInputs = [
1251 1251 self."setuptools"
1252 1252 ];
1253 1253 src = fetchurl {
1254 1254 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
1255 1255 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
1256 1256 };
1257 1257 meta = {
1258 1258 license = [ pkgs.lib.licenses.mit ];
1259 1259 };
1260 1260 };
1261 1261 "plaster-pastedeploy" = super.buildPythonPackage {
1262 1262 name = "plaster-pastedeploy-0.7";
1263 1263 doCheck = false;
1264 1264 propagatedBuildInputs = [
1265 1265 self."pastedeploy"
1266 1266 self."plaster"
1267 1267 ];
1268 1268 src = fetchurl {
1269 1269 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
1270 1270 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
1271 1271 };
1272 1272 meta = {
1273 1273 license = [ pkgs.lib.licenses.mit ];
1274 1274 };
1275 1275 };
1276 1276 "pluggy" = super.buildPythonPackage {
1277 1277 name = "pluggy-0.13.1";
1278 1278 doCheck = false;
1279 1279 propagatedBuildInputs = [
1280 1280 self."importlib-metadata"
1281 1281 ];
1282 1282 src = fetchurl {
1283 1283 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
1284 1284 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
1285 1285 };
1286 1286 meta = {
1287 1287 license = [ pkgs.lib.licenses.mit ];
1288 1288 };
1289 1289 };
1290 1290 "premailer" = super.buildPythonPackage {
1291 1291 name = "premailer-3.6.1";
1292 1292 doCheck = false;
1293 1293 propagatedBuildInputs = [
1294 1294 self."lxml"
1295 1295 self."cssselect"
1296 1296 self."cssutils"
1297 1297 self."requests"
1298 1298 self."cachetools"
1299 1299 ];
1300 1300 src = fetchurl {
1301 1301 url = "https://files.pythonhosted.org/packages/62/da/2f43cdf9d3d79c80c4856a12389a1f257d65fe9ccc44bc6b4383c8a18e33/premailer-3.6.1.tar.gz";
1302 1302 sha256 = "08pshx7a110k4ll20x0xhpvyn3kkipkrbgxjjn7ncdxs54ihdhgw";
1303 1303 };
1304 1304 meta = {
1305 1305 license = [ pkgs.lib.licenses.psfl { fullName = "Python"; } ];
1306 1306 };
1307 1307 };
1308 1308 "prompt-toolkit" = super.buildPythonPackage {
1309 1309 name = "prompt-toolkit-1.0.18";
1310 1310 doCheck = false;
1311 1311 propagatedBuildInputs = [
1312 1312 self."six"
1313 1313 self."wcwidth"
1314 1314 ];
1315 1315 src = fetchurl {
1316 1316 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
1317 1317 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
1318 1318 };
1319 1319 meta = {
1320 1320 license = [ pkgs.lib.licenses.bsdOriginal ];
1321 1321 };
1322 1322 };
1323 1323 "psutil" = super.buildPythonPackage {
1324 1324 name = "psutil-5.7.0";
1325 1325 doCheck = false;
1326 1326 src = fetchurl {
1327 1327 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
1328 1328 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
1329 1329 };
1330 1330 meta = {
1331 1331 license = [ pkgs.lib.licenses.bsdOriginal ];
1332 1332 };
1333 1333 };
1334 1334 "psycopg2" = super.buildPythonPackage {
1335 1335 name = "psycopg2-2.8.4";
1336 1336 doCheck = false;
1337 1337 src = fetchurl {
1338 1338 url = "https://files.pythonhosted.org/packages/84/d7/6a93c99b5ba4d4d22daa3928b983cec66df4536ca50b22ce5dcac65e4e71/psycopg2-2.8.4.tar.gz";
1339 1339 sha256 = "1djvh98pi4hjd8rxbq8qzc63bg8v78k33yg6pl99wak61b6fb67q";
1340 1340 };
1341 1341 meta = {
1342 1342 license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];
1343 1343 };
1344 1344 };
1345 1345 "ptyprocess" = super.buildPythonPackage {
1346 1346 name = "ptyprocess-0.6.0";
1347 1347 doCheck = false;
1348 1348 src = fetchurl {
1349 1349 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
1350 1350 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
1351 1351 };
1352 1352 meta = {
1353 1353 license = [ ];
1354 1354 };
1355 1355 };
1356 1356 "py" = super.buildPythonPackage {
1357 1357 name = "py-1.8.0";
1358 1358 doCheck = false;
1359 1359 src = fetchurl {
1360 1360 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
1361 1361 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
1362 1362 };
1363 1363 meta = {
1364 1364 license = [ pkgs.lib.licenses.mit ];
1365 1365 };
1366 1366 };
1367 1367 "py-bcrypt" = super.buildPythonPackage {
1368 1368 name = "py-bcrypt-0.4";
1369 1369 doCheck = false;
1370 1370 src = fetchurl {
1371 1371 url = "https://files.pythonhosted.org/packages/68/b1/1c3068c5c4d2e35c48b38dcc865301ebfdf45f54507086ac65ced1fd3b3d/py-bcrypt-0.4.tar.gz";
1372 1372 sha256 = "0y6smdggwi5s72v6p1nn53dg6w05hna3d264cq6kas0lap73p8az";
1373 1373 };
1374 1374 meta = {
1375 1375 license = [ pkgs.lib.licenses.bsdOriginal ];
1376 1376 };
1377 1377 };
1378 1378 "py-gfm" = super.buildPythonPackage {
1379 1379 name = "py-gfm-0.1.4";
1380 1380 doCheck = false;
1381 1381 propagatedBuildInputs = [
1382 1382 self."setuptools"
1383 1383 self."markdown"
1384 1384 ];
1385 1385 src = fetchurl {
1386 1386 url = "https://files.pythonhosted.org/packages/06/ee/004a03a1d92bb386dae44f6dd087db541bc5093374f1637d4d4ae5596cc2/py-gfm-0.1.4.tar.gz";
1387 1387 sha256 = "0zip06g2isivx8fzgqd4n9qzsa22c25jas1rsb7m2rnjg72m0rzg";
1388 1388 };
1389 1389 meta = {
1390 1390 license = [ pkgs.lib.licenses.bsdOriginal ];
1391 1391 };
1392 1392 };
1393 1393 "pyasn1" = super.buildPythonPackage {
1394 1394 name = "pyasn1-0.4.8";
1395 1395 doCheck = false;
1396 1396 src = fetchurl {
1397 1397 url = "https://files.pythonhosted.org/packages/a4/db/fffec68299e6d7bad3d504147f9094830b704527a7fc098b721d38cc7fa7/pyasn1-0.4.8.tar.gz";
1398 1398 sha256 = "1fnhbi3rmk47l9851gbik0flfr64vs5j0hbqx24cafjap6gprxxf";
1399 1399 };
1400 1400 meta = {
1401 1401 license = [ pkgs.lib.licenses.bsdOriginal ];
1402 1402 };
1403 1403 };
1404 1404 "pyasn1-modules" = super.buildPythonPackage {
1405 1405 name = "pyasn1-modules-0.2.6";
1406 1406 doCheck = false;
1407 1407 propagatedBuildInputs = [
1408 1408 self."pyasn1"
1409 1409 ];
1410 1410 src = fetchurl {
1411 1411 url = "https://files.pythonhosted.org/packages/f1/a9/a1ef72a0e43feff643cf0130a08123dea76205e7a0dda37e3efb5f054a31/pyasn1-modules-0.2.6.tar.gz";
1412 1412 sha256 = "08hph9j1r018drnrny29l7dl2q0cin78csswrhwrh8jmq61pmha3";
1413 1413 };
1414 1414 meta = {
1415 1415 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
1416 1416 };
1417 1417 };
1418 1418 "pycparser" = super.buildPythonPackage {
1419 1419 name = "pycparser-2.20";
1420 1420 doCheck = false;
1421 1421 src = fetchurl {
1422 1422 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
1423 1423 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
1424 1424 };
1425 1425 meta = {
1426 1426 license = [ pkgs.lib.licenses.bsdOriginal ];
1427 1427 };
1428 1428 };
1429 1429 "pycrypto" = super.buildPythonPackage {
1430 1430 name = "pycrypto-2.6.1";
1431 1431 doCheck = false;
1432 1432 src = fetchurl {
1433 1433 url = "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz";
1434 1434 sha256 = "0g0ayql5b9mkjam8hym6zyg6bv77lbh66rv1fyvgqb17kfc1xkpj";
1435 1435 };
1436 1436 meta = {
1437 1437 license = [ pkgs.lib.licenses.publicDomain ];
1438 1438 };
1439 1439 };
1440 1440 "pycurl" = super.buildPythonPackage {
1441 1441 name = "pycurl-7.43.0.3";
1442 1442 doCheck = false;
1443 1443 src = fetchurl {
1444 1444 url = "https://files.pythonhosted.org/packages/ac/b3/0f3979633b7890bab6098d84c84467030b807a1e2b31f5d30103af5a71ca/pycurl-7.43.0.3.tar.gz";
1445 1445 sha256 = "13nsvqhvnmnvfk75s8iynqsgszyv06cjp4drd3psi7zpbh63623g";
1446 1446 };
1447 1447 meta = {
1448 1448 license = [ pkgs.lib.licenses.mit { fullName = "LGPL/MIT"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1449 1449 };
1450 1450 };
1451 1451 "pygments" = super.buildPythonPackage {
1452 1452 name = "pygments-2.4.2";
1453 1453 doCheck = false;
1454 1454 src = fetchurl {
1455 1455 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
1456 1456 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
1457 1457 };
1458 1458 meta = {
1459 1459 license = [ pkgs.lib.licenses.bsdOriginal ];
1460 1460 };
1461 1461 };
1462 1462 "pymysql" = super.buildPythonPackage {
1463 1463 name = "pymysql-0.8.1";
1464 1464 doCheck = false;
1465 1465 src = fetchurl {
1466 1466 url = "https://files.pythonhosted.org/packages/44/39/6bcb83cae0095a31b6be4511707fdf2009d3e29903a55a0494d3a9a2fac0/PyMySQL-0.8.1.tar.gz";
1467 1467 sha256 = "0a96crz55bw4h6myh833skrli7b0ck89m3x673y2z2ryy7zrpq9l";
1468 1468 };
1469 1469 meta = {
1470 1470 license = [ pkgs.lib.licenses.mit ];
1471 1471 };
1472 1472 };
1473 1473 "pyotp" = super.buildPythonPackage {
1474 1474 name = "pyotp-2.3.0";
1475 1475 doCheck = false;
1476 1476 src = fetchurl {
1477 1477 url = "https://files.pythonhosted.org/packages/f7/15/395c4945ea6bc37e8811280bb675615cb4c2b2c1cd70bdc43329da91a386/pyotp-2.3.0.tar.gz";
1478 1478 sha256 = "18d13ikra1iq0xyfqfm72zhgwxi2qi9ps6z1a6zmqp4qrn57wlzw";
1479 1479 };
1480 1480 meta = {
1481 1481 license = [ pkgs.lib.licenses.mit ];
1482 1482 };
1483 1483 };
1484 1484 "pyparsing" = super.buildPythonPackage {
1485 1485 name = "pyparsing-2.4.7";
1486 1486 doCheck = false;
1487 1487 src = fetchurl {
1488 1488 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
1489 1489 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
1490 1490 };
1491 1491 meta = {
1492 1492 license = [ pkgs.lib.licenses.mit ];
1493 1493 };
1494 1494 };
1495 1495 "pyramid" = super.buildPythonPackage {
1496 1496 name = "pyramid-1.10.4";
1497 1497 doCheck = false;
1498 1498 propagatedBuildInputs = [
1499 1499 self."hupper"
1500 1500 self."plaster"
1501 1501 self."plaster-pastedeploy"
1502 1502 self."setuptools"
1503 1503 self."translationstring"
1504 1504 self."venusian"
1505 1505 self."webob"
1506 1506 self."zope.deprecation"
1507 1507 self."zope.interface"
1508 1508 self."repoze.lru"
1509 1509 ];
1510 1510 src = fetchurl {
1511 1511 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
1512 1512 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
1513 1513 };
1514 1514 meta = {
1515 1515 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1516 1516 };
1517 1517 };
1518 1518 "pyramid-debugtoolbar" = super.buildPythonPackage {
1519 1519 name = "pyramid-debugtoolbar-4.6.1";
1520 1520 doCheck = false;
1521 1521 propagatedBuildInputs = [
1522 1522 self."pyramid"
1523 1523 self."pyramid-mako"
1524 1524 self."repoze.lru"
1525 1525 self."pygments"
1526 1526 self."ipaddress"
1527 1527 ];
1528 1528 src = fetchurl {
1529 1529 url = "https://files.pythonhosted.org/packages/99/f6/b8603f82c18275be293921bc3a2184205056ca505747bf64ab8a0c08e124/pyramid_debugtoolbar-4.6.1.tar.gz";
1530 1530 sha256 = "185z7q8n959ga5331iczwra2iljwkidfx4qn6bbd7vm3rm4w6llv";
1531 1531 };
1532 1532 meta = {
1533 1533 license = [ { fullName = "Repoze Public License"; } pkgs.lib.licenses.bsdOriginal ];
1534 1534 };
1535 1535 };
1536 1536 "pyramid-jinja2" = super.buildPythonPackage {
1537 1537 name = "pyramid-jinja2-2.7";
1538 1538 doCheck = false;
1539 1539 propagatedBuildInputs = [
1540 1540 self."pyramid"
1541 1541 self."zope.deprecation"
1542 1542 self."jinja2"
1543 1543 self."markupsafe"
1544 1544 ];
1545 1545 src = fetchurl {
1546 1546 url = "https://files.pythonhosted.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
1547 1547 sha256 = "1sz5s0pp5jqhf4w22w9527yz8hgdi4mhr6apd6vw1gm5clghh8aw";
1548 1548 };
1549 1549 meta = {
1550 1550 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1551 1551 };
1552 1552 };
1553 1553 "pyramid-apispec" = super.buildPythonPackage {
1554 1554 name = "pyramid-apispec-0.3.2";
1555 1555 doCheck = false;
1556 1556 propagatedBuildInputs = [
1557 1557 self."apispec"
1558 1558 ];
1559 1559 src = fetchurl {
1560 1560 url = "https://files.pythonhosted.org/packages/2a/30/1dea5d81ea635449572ba60ec3148310d75ae4530c3c695f54b0991bb8c7/pyramid_apispec-0.3.2.tar.gz";
1561 1561 sha256 = "0ffrcqp9dkykivhfcq0v9lgy6w0qhwl6x78925vfjmayly9r8da0";
1562 1562 };
1563 1563 meta = {
1564 1564 license = [ pkgs.lib.licenses.bsdOriginal ];
1565 1565 };
1566 1566 };
1567 1567 "pyramid-mailer" = super.buildPythonPackage {
1568 1568 name = "pyramid-mailer-0.15.1";
1569 1569 doCheck = false;
1570 1570 propagatedBuildInputs = [
1571 1571 self."pyramid"
1572 1572 self."repoze.sendmail"
1573 1573 self."transaction"
1574 1574 ];
1575 1575 src = fetchurl {
1576 1576 url = "https://files.pythonhosted.org/packages/a0/f2/6febf5459dff4d7e653314d575469ad2e11b9d2af2c3606360e1c67202f2/pyramid_mailer-0.15.1.tar.gz";
1577 1577 sha256 = "16vg8jb203jgb7b0hd6wllfqvp542qh2ry1gjai2m6qpv5agy2pc";
1578 1578 };
1579 1579 meta = {
1580 1580 license = [ pkgs.lib.licenses.bsdOriginal ];
1581 1581 };
1582 1582 };
1583 1583 "pyramid-mako" = super.buildPythonPackage {
1584 1584 name = "pyramid-mako-1.1.0";
1585 1585 doCheck = false;
1586 1586 propagatedBuildInputs = [
1587 1587 self."pyramid"
1588 1588 self."mako"
1589 1589 ];
1590 1590 src = fetchurl {
1591 1591 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
1592 1592 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
1593 1593 };
1594 1594 meta = {
1595 1595 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1596 1596 };
1597 1597 };
1598 1598 "pysqlite" = super.buildPythonPackage {
1599 1599 name = "pysqlite-2.8.3";
1600 1600 doCheck = false;
1601 1601 src = fetchurl {
1602 1602 url = "https://files.pythonhosted.org/packages/42/02/981b6703e3c83c5b25a829c6e77aad059f9481b0bbacb47e6e8ca12bd731/pysqlite-2.8.3.tar.gz";
1603 1603 sha256 = "1424gwq9sil2ffmnizk60q36vydkv8rxs6m7xs987kz8cdc37lqp";
1604 1604 };
1605 1605 meta = {
1606 1606 license = [ { fullName = "zlib/libpng License"; } { fullName = "zlib/libpng license"; } ];
1607 1607 };
1608 1608 };
1609 1609 "pytest" = super.buildPythonPackage {
1610 1610 name = "pytest-4.6.5";
1611 1611 doCheck = false;
1612 1612 propagatedBuildInputs = [
1613 1613 self."py"
1614 1614 self."six"
1615 1615 self."packaging"
1616 1616 self."attrs"
1617 1617 self."atomicwrites"
1618 1618 self."pluggy"
1619 1619 self."importlib-metadata"
1620 1620 self."wcwidth"
1621 1621 self."funcsigs"
1622 1622 self."pathlib2"
1623 1623 self."more-itertools"
1624 1624 ];
1625 1625 src = fetchurl {
1626 1626 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
1627 1627 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
1628 1628 };
1629 1629 meta = {
1630 1630 license = [ pkgs.lib.licenses.mit ];
1631 1631 };
1632 1632 };
1633 1633 "pytest-cov" = super.buildPythonPackage {
1634 1634 name = "pytest-cov-2.7.1";
1635 1635 doCheck = false;
1636 1636 propagatedBuildInputs = [
1637 1637 self."pytest"
1638 1638 self."coverage"
1639 1639 ];
1640 1640 src = fetchurl {
1641 1641 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
1642 1642 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
1643 1643 };
1644 1644 meta = {
1645 1645 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
1646 1646 };
1647 1647 };
1648 1648 "pytest-profiling" = super.buildPythonPackage {
1649 1649 name = "pytest-profiling-1.7.0";
1650 1650 doCheck = false;
1651 1651 propagatedBuildInputs = [
1652 1652 self."six"
1653 1653 self."pytest"
1654 1654 self."gprof2dot"
1655 1655 ];
1656 1656 src = fetchurl {
1657 1657 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
1658 1658 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
1659 1659 };
1660 1660 meta = {
1661 1661 license = [ pkgs.lib.licenses.mit ];
1662 1662 };
1663 1663 };
1664 1664 "pytest-runner" = super.buildPythonPackage {
1665 1665 name = "pytest-runner-5.1";
1666 1666 doCheck = false;
1667 1667 src = fetchurl {
1668 1668 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
1669 1669 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
1670 1670 };
1671 1671 meta = {
1672 1672 license = [ pkgs.lib.licenses.mit ];
1673 1673 };
1674 1674 };
1675 1675 "pytest-sugar" = super.buildPythonPackage {
1676 1676 name = "pytest-sugar-0.9.2";
1677 1677 doCheck = false;
1678 1678 propagatedBuildInputs = [
1679 1679 self."pytest"
1680 1680 self."termcolor"
1681 1681 self."packaging"
1682 1682 ];
1683 1683 src = fetchurl {
1684 1684 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
1685 1685 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
1686 1686 };
1687 1687 meta = {
1688 1688 license = [ pkgs.lib.licenses.bsdOriginal ];
1689 1689 };
1690 1690 };
1691 1691 "pytest-timeout" = super.buildPythonPackage {
1692 1692 name = "pytest-timeout-1.3.3";
1693 1693 doCheck = false;
1694 1694 propagatedBuildInputs = [
1695 1695 self."pytest"
1696 1696 ];
1697 1697 src = fetchurl {
1698 1698 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
1699 1699 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
1700 1700 };
1701 1701 meta = {
1702 1702 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
1703 1703 };
1704 1704 };
1705 1705 "python-dateutil" = super.buildPythonPackage {
1706 1706 name = "python-dateutil-2.8.1";
1707 1707 doCheck = false;
1708 1708 propagatedBuildInputs = [
1709 1709 self."six"
1710 1710 ];
1711 1711 src = fetchurl {
1712 1712 url = "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz";
1713 1713 sha256 = "0g42w7k5007iv9dam6gnja2ry8ydwirh99mgdll35s12pyfzxsvk";
1714 1714 };
1715 1715 meta = {
1716 1716 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.asl20 { fullName = "Dual License"; } ];
1717 1717 };
1718 1718 };
1719 1719 "python-editor" = super.buildPythonPackage {
1720 1720 name = "python-editor-1.0.4";
1721 1721 doCheck = false;
1722 1722 src = fetchurl {
1723 1723 url = "https://files.pythonhosted.org/packages/0a/85/78f4a216d28343a67b7397c99825cff336330893f00601443f7c7b2f2234/python-editor-1.0.4.tar.gz";
1724 1724 sha256 = "0yrjh8w72ivqxi4i7xsg5b1vz15x8fg51xra7c3bgfyxqnyadzai";
1725 1725 };
1726 1726 meta = {
1727 1727 license = [ pkgs.lib.licenses.asl20 { fullName = "Apache"; } ];
1728 1728 };
1729 1729 };
1730 1730 "python-ldap" = super.buildPythonPackage {
1731 1731 name = "python-ldap-3.2.0";
1732 1732 doCheck = false;
1733 1733 propagatedBuildInputs = [
1734 1734 self."pyasn1"
1735 1735 self."pyasn1-modules"
1736 1736 ];
1737 1737 src = fetchurl {
1738 1738 url = "https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz";
1739 1739 sha256 = "13nvrhp85yr0jyxixcjj012iw8l9wynxxlykm9j3alss6waln73x";
1740 1740 };
1741 1741 meta = {
1742 1742 license = [ pkgs.lib.licenses.psfl ];
1743 1743 };
1744 1744 };
1745 1745 "python-memcached" = super.buildPythonPackage {
1746 1746 name = "python-memcached-1.59";
1747 1747 doCheck = false;
1748 1748 propagatedBuildInputs = [
1749 1749 self."six"
1750 1750 ];
1751 1751 src = fetchurl {
1752 1752 url = "https://files.pythonhosted.org/packages/90/59/5faf6e3cd8a568dd4f737ddae4f2e54204fd8c51f90bf8df99aca6c22318/python-memcached-1.59.tar.gz";
1753 1753 sha256 = "0kvyapavbirk2x3n1jx4yb9nyigrj1s3x15nm3qhpvhkpqvqdqm2";
1754 1754 };
1755 1755 meta = {
1756 1756 license = [ pkgs.lib.licenses.psfl ];
1757 1757 };
1758 1758 };
1759 1759 "python-pam" = super.buildPythonPackage {
1760 1760 name = "python-pam-1.8.4";
1761 1761 doCheck = false;
1762 1762 src = fetchurl {
1763 1763 url = "https://files.pythonhosted.org/packages/01/16/544d01cae9f28e0292dbd092b6b8b0bf222b528f362ee768a5bed2140111/python-pam-1.8.4.tar.gz";
1764 1764 sha256 = "16whhc0vr7gxsbzvsnq65nq8fs3wwmx755cavm8kkczdkz4djmn8";
1765 1765 };
1766 1766 meta = {
1767 1767 license = [ { fullName = "License :: OSI Approved :: MIT License"; } pkgs.lib.licenses.mit ];
1768 1768 };
1769 1769 };
1770 1770 "python-saml" = super.buildPythonPackage {
1771 1771 name = "python-saml-2.4.2";
1772 1772 doCheck = false;
1773 1773 propagatedBuildInputs = [
1774 1774 self."dm.xmlsec.binding"
1775 1775 self."isodate"
1776 1776 self."defusedxml"
1777 1777 ];
1778 1778 src = fetchurl {
1779 1779 url = "https://files.pythonhosted.org/packages/79/a8/a6611017e0883102fd5e2b73c9d90691b8134e38247c04ee1531d3dc647c/python-saml-2.4.2.tar.gz";
1780 1780 sha256 = "0dls4hwvf13yg7x5yfjrghbywg8g38vn5vr0rsf70hli3ydbfm43";
1781 1781 };
1782 1782 meta = {
1783 1783 license = [ pkgs.lib.licenses.mit ];
1784 1784 };
1785 1785 };
1786 1786 "pytz" = super.buildPythonPackage {
1787 1787 name = "pytz-2019.3";
1788 1788 doCheck = false;
1789 1789 src = fetchurl {
1790 1790 url = "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz";
1791 1791 sha256 = "1ghrk1wg45d3nymj7bf4zj03n3bh64xmczhk4pfi577hdkdhcb5h";
1792 1792 };
1793 1793 meta = {
1794 1794 license = [ pkgs.lib.licenses.mit ];
1795 1795 };
1796 1796 };
1797 1797 "pyzmq" = super.buildPythonPackage {
1798 1798 name = "pyzmq-14.6.0";
1799 1799 doCheck = false;
1800 1800 src = fetchurl {
1801 1801 url = "https://files.pythonhosted.org/packages/8a/3b/5463d5a9d712cd8bbdac335daece0d69f6a6792da4e3dd89956c0db4e4e6/pyzmq-14.6.0.tar.gz";
1802 1802 sha256 = "1frmbjykvhmdg64g7sn20c9fpamrsfxwci1nhhg8q7jgz5pq0ikp";
1803 1803 };
1804 1804 meta = {
1805 1805 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "LGPL+BSD"; } { fullName = "GNU Library or Lesser General Public License (LGPL)"; } ];
1806 1806 };
1807 1807 };
1808 1808 "PyYAML" = super.buildPythonPackage {
1809 1809 name = "PyYAML-5.3.1";
1810 1810 doCheck = false;
1811 1811 src = fetchurl {
1812 1812 url = "https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz";
1813 1813 sha256 = "0pb4zvkfxfijkpgd1b86xjsqql97ssf1knbd1v53wkg1qm9cgsmq";
1814 1814 };
1815 1815 meta = {
1816 1816 license = [ pkgs.lib.licenses.mit ];
1817 1817 };
1818 1818 };
1819 1819 "regex" = super.buildPythonPackage {
1820 1820 name = "regex-2020.9.27";
1821 1821 doCheck = false;
1822 1822 src = fetchurl {
1823 1823 url = "https://files.pythonhosted.org/packages/93/8c/17f45cdfb39b13d4b5f909e4b4c2917abcbdef9c0036919a0399769148cf/regex-2020.9.27.tar.gz";
1824 1824 sha256 = "179ngfzwbsjvn5vhyzdahvmg0f7acahkwwy9bpjy1pv08bm2mwx6";
1825 1825 };
1826 1826 meta = {
1827 1827 license = [ pkgs.lib.licenses.psfl ];
1828 1828 };
1829 1829 };
1830 1830 "redis" = super.buildPythonPackage {
1831 1831 name = "redis-3.5.3";
1832 1832 doCheck = false;
1833 1833 src = fetchurl {
1834 1834 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
1835 1835 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
1836 1836 };
1837 1837 meta = {
1838 1838 license = [ pkgs.lib.licenses.mit ];
1839 1839 };
1840 1840 };
1841 1841 "repoze.lru" = super.buildPythonPackage {
1842 1842 name = "repoze.lru-0.7";
1843 1843 doCheck = false;
1844 1844 src = fetchurl {
1845 1845 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
1846 1846 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
1847 1847 };
1848 1848 meta = {
1849 1849 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1850 1850 };
1851 1851 };
1852 1852 "repoze.sendmail" = super.buildPythonPackage {
1853 1853 name = "repoze.sendmail-4.4.1";
1854 1854 doCheck = false;
1855 1855 propagatedBuildInputs = [
1856 1856 self."setuptools"
1857 1857 self."zope.interface"
1858 1858 self."transaction"
1859 1859 ];
1860 1860 src = fetchurl {
1861 1861 url = "https://files.pythonhosted.org/packages/12/4e/8ef1fd5c42765d712427b9c391419a77bd48877886d2cbc5e9f23c8cad9b/repoze.sendmail-4.4.1.tar.gz";
1862 1862 sha256 = "096ln02jr2afk7ab9j2czxqv2ryqq7m86ah572nqplx52iws73ks";
1863 1863 };
1864 1864 meta = {
1865 1865 license = [ pkgs.lib.licenses.zpl21 ];
1866 1866 };
1867 1867 };
1868 1868 "requests" = super.buildPythonPackage {
1869 1869 name = "requests-2.22.0";
1870 1870 doCheck = false;
1871 1871 propagatedBuildInputs = [
1872 1872 self."chardet"
1873 1873 self."idna"
1874 1874 self."urllib3"
1875 1875 self."certifi"
1876 1876 ];
1877 1877 src = fetchurl {
1878 1878 url = "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz";
1879 1879 sha256 = "1d5ybh11jr5sm7xp6mz8fyc7vrp4syifds91m7sj60xalal0gq0i";
1880 1880 };
1881 1881 meta = {
1882 1882 license = [ pkgs.lib.licenses.asl20 ];
1883 1883 };
1884 1884 };
1885 1885 "rhodecode-enterprise-ce" = super.buildPythonPackage {
1886 name = "rhodecode-enterprise-ce-4.24.1";
1886 name = "rhodecode-enterprise-ce-4.25.0";
1887 1887 buildInputs = [
1888 1888 self."pytest"
1889 1889 self."py"
1890 1890 self."pytest-cov"
1891 1891 self."pytest-sugar"
1892 1892 self."pytest-runner"
1893 1893 self."pytest-profiling"
1894 1894 self."pytest-timeout"
1895 1895 self."gprof2dot"
1896 1896 self."mock"
1897 1897 self."cov-core"
1898 1898 self."coverage"
1899 1899 self."webtest"
1900 1900 self."beautifulsoup4"
1901 1901 self."configobj"
1902 1902 ];
1903 1903 doCheck = true;
1904 1904 propagatedBuildInputs = [
1905 1905 self."amqp"
1906 1906 self."babel"
1907 1907 self."beaker"
1908 1908 self."bleach"
1909 1909 self."celery"
1910 1910 self."channelstream"
1911 1911 self."click"
1912 1912 self."colander"
1913 1913 self."configobj"
1914 1914 self."cssselect"
1915 1915 self."cryptography"
1916 1916 self."decorator"
1917 1917 self."deform"
1918 1918 self."docutils"
1919 1919 self."dogpile.cache"
1920 1920 self."dogpile.core"
1921 1921 self."formencode"
1922 1922 self."future"
1923 1923 self."futures"
1924 1924 self."infrae.cache"
1925 1925 self."iso8601"
1926 1926 self."itsdangerous"
1927 1927 self."kombu"
1928 1928 self."lxml"
1929 1929 self."mako"
1930 1930 self."markdown"
1931 1931 self."markupsafe"
1932 1932 self."msgpack-python"
1933 1933 self."pyotp"
1934 1934 self."packaging"
1935 1935 self."pathlib2"
1936 1936 self."paste"
1937 1937 self."pastedeploy"
1938 1938 self."pastescript"
1939 1939 self."peppercorn"
1940 1940 self."premailer"
1941 1941 self."psutil"
1942 1942 self."py-bcrypt"
1943 1943 self."pycurl"
1944 1944 self."pycrypto"
1945 1945 self."pygments"
1946 1946 self."pyparsing"
1947 1947 self."pyramid-debugtoolbar"
1948 1948 self."pyramid-mako"
1949 1949 self."pyramid"
1950 1950 self."pyramid-mailer"
1951 1951 self."python-dateutil"
1952 1952 self."python-ldap"
1953 1953 self."python-memcached"
1954 1954 self."python-pam"
1955 1955 self."python-saml"
1956 1956 self."pytz"
1957 1957 self."tzlocal"
1958 1958 self."pyzmq"
1959 1959 self."py-gfm"
1960 1960 self."regex"
1961 1961 self."redis"
1962 1962 self."repoze.lru"
1963 1963 self."requests"
1964 1964 self."routes"
1965 1965 self."simplejson"
1966 1966 self."six"
1967 1967 self."sqlalchemy"
1968 1968 self."sshpubkeys"
1969 1969 self."subprocess32"
1970 1970 self."supervisor"
1971 1971 self."translationstring"
1972 1972 self."urllib3"
1973 1973 self."urlobject"
1974 1974 self."venusian"
1975 1975 self."weberror"
1976 1976 self."webhelpers2"
1977 1977 self."webob"
1978 1978 self."whoosh"
1979 1979 self."wsgiref"
1980 1980 self."zope.cachedescriptors"
1981 1981 self."zope.deprecation"
1982 1982 self."zope.event"
1983 1983 self."zope.interface"
1984 1984 self."mysql-python"
1985 1985 self."pymysql"
1986 1986 self."pysqlite"
1987 1987 self."psycopg2"
1988 1988 self."nbconvert"
1989 1989 self."nbformat"
1990 1990 self."jupyter-client"
1991 1991 self."jupyter-core"
1992 1992 self."alembic"
1993 1993 self."invoke"
1994 1994 self."bumpversion"
1995 1995 self."gevent"
1996 1996 self."greenlet"
1997 1997 self."gunicorn"
1998 1998 self."waitress"
1999 1999 self."ipdb"
2000 2000 self."ipython"
2001 2001 self."rhodecode-tools"
2002 2002 self."appenlight-client"
2003 2003 self."pytest"
2004 2004 self."py"
2005 2005 self."pytest-cov"
2006 2006 self."pytest-sugar"
2007 2007 self."pytest-runner"
2008 2008 self."pytest-profiling"
2009 2009 self."pytest-timeout"
2010 2010 self."gprof2dot"
2011 2011 self."mock"
2012 2012 self."cov-core"
2013 2013 self."coverage"
2014 2014 self."webtest"
2015 2015 self."beautifulsoup4"
2016 2016 ];
2017 2017 src = ./.;
2018 2018 meta = {
2019 2019 license = [ { fullName = "Affero GNU General Public License v3 or later (AGPLv3+)"; } { fullName = "AGPLv3, and Commercial License"; } ];
2020 2020 };
2021 2021 };
2022 2022 "rhodecode-tools" = super.buildPythonPackage {
2023 2023 name = "rhodecode-tools-1.4.0";
2024 2024 doCheck = false;
2025 2025 propagatedBuildInputs = [
2026 2026 self."click"
2027 2027 self."future"
2028 2028 self."six"
2029 2029 self."mako"
2030 2030 self."markupsafe"
2031 2031 self."requests"
2032 2032 self."urllib3"
2033 2033 self."whoosh"
2034 2034 self."elasticsearch"
2035 2035 self."elasticsearch-dsl"
2036 2036 self."elasticsearch2"
2037 2037 self."elasticsearch1-dsl"
2038 2038 ];
2039 2039 src = fetchurl {
2040 2040 url = "https://code.rhodecode.com/rhodecode-tools-ce/artifacts/download/0-ed54e749-2ef5-4bc7-ae7f-7900e3c2aa15.tar.gz?sha256=76f024bad3a1e55fdb3d64f13f5b77ff21a12fee699918de2110fe21effd5a3a";
2041 2041 sha256 = "0fjszppj3zhh47g1i6b9xqps28gzfxdkzwb47pdmzrd1sfx29w3n";
2042 2042 };
2043 2043 meta = {
2044 2044 license = [ { fullName = "Apache 2.0 and Proprietary"; } ];
2045 2045 };
2046 2046 };
2047 2047 "routes" = super.buildPythonPackage {
2048 2048 name = "routes-2.4.1";
2049 2049 doCheck = false;
2050 2050 propagatedBuildInputs = [
2051 2051 self."six"
2052 2052 self."repoze.lru"
2053 2053 ];
2054 2054 src = fetchurl {
2055 2055 url = "https://files.pythonhosted.org/packages/33/38/ea827837e68d9c7dde4cff7ec122a93c319f0effc08ce92a17095576603f/Routes-2.4.1.tar.gz";
2056 2056 sha256 = "1zamff3m0kc4vyfniyhxpkkcqv1rrgnmh37ykxv34nna1ws47vi6";
2057 2057 };
2058 2058 meta = {
2059 2059 license = [ pkgs.lib.licenses.mit ];
2060 2060 };
2061 2061 };
2062 2062 "scandir" = super.buildPythonPackage {
2063 2063 name = "scandir-1.10.0";
2064 2064 doCheck = false;
2065 2065 src = fetchurl {
2066 2066 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
2067 2067 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
2068 2068 };
2069 2069 meta = {
2070 2070 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
2071 2071 };
2072 2072 };
2073 2073 "setproctitle" = super.buildPythonPackage {
2074 2074 name = "setproctitle-1.1.10";
2075 2075 doCheck = false;
2076 2076 src = fetchurl {
2077 2077 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
2078 2078 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
2079 2079 };
2080 2080 meta = {
2081 2081 license = [ pkgs.lib.licenses.bsdOriginal ];
2082 2082 };
2083 2083 };
2084 2084 "setuptools" = super.buildPythonPackage {
2085 2085 name = "setuptools-44.1.0";
2086 2086 doCheck = false;
2087 2087 src = fetchurl {
2088 2088 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
2089 2089 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
2090 2090 };
2091 2091 meta = {
2092 2092 license = [ pkgs.lib.licenses.mit ];
2093 2093 };
2094 2094 };
2095 "setuptools-scm" = super.buildPythonPackage {
2096 name = "setuptools-scm-3.5.0";
2097 doCheck = false;
2098 src = fetchurl {
2099 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
2100 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
2101 };
2102 meta = {
2103 license = [ pkgs.lib.licenses.psfl ];
2104 };
2105 };
2095 2106 "simplegeneric" = super.buildPythonPackage {
2096 2107 name = "simplegeneric-0.8.1";
2097 2108 doCheck = false;
2098 2109 src = fetchurl {
2099 2110 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
2100 2111 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
2101 2112 };
2102 2113 meta = {
2103 2114 license = [ pkgs.lib.licenses.zpl21 ];
2104 2115 };
2105 2116 };
2106 2117 "simplejson" = super.buildPythonPackage {
2107 2118 name = "simplejson-3.16.0";
2108 2119 doCheck = false;
2109 2120 src = fetchurl {
2110 2121 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
2111 2122 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
2112 2123 };
2113 2124 meta = {
2114 2125 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
2115 2126 };
2116 2127 };
2117 2128 "six" = super.buildPythonPackage {
2118 2129 name = "six-1.11.0";
2119 2130 doCheck = false;
2120 2131 src = fetchurl {
2121 2132 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
2122 2133 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
2123 2134 };
2124 2135 meta = {
2125 2136 license = [ pkgs.lib.licenses.mit ];
2126 2137 };
2127 2138 };
2128 2139 "sqlalchemy" = super.buildPythonPackage {
2129 2140 name = "sqlalchemy-1.3.15";
2130 2141 doCheck = false;
2131 2142 src = fetchurl {
2132 2143 url = "https://files.pythonhosted.org/packages/8c/30/4134e726dd5ed13728ff814fa91fc01c447ad8700504653fe99d91fdd34b/SQLAlchemy-1.3.15.tar.gz";
2133 2144 sha256 = "0iglkvymfp35zm5pxy5kzqvcv96kkas0chqdx7xpla86sspa9k64";
2134 2145 };
2135 2146 meta = {
2136 2147 license = [ pkgs.lib.licenses.mit ];
2137 2148 };
2138 2149 };
2139 2150 "sshpubkeys" = super.buildPythonPackage {
2140 2151 name = "sshpubkeys-3.1.0";
2141 2152 doCheck = false;
2142 2153 propagatedBuildInputs = [
2143 2154 self."cryptography"
2144 2155 self."ecdsa"
2145 2156 ];
2146 2157 src = fetchurl {
2147 2158 url = "https://files.pythonhosted.org/packages/00/23/f7508a12007c96861c3da811992f14283d79c819d71a217b3e12d5196649/sshpubkeys-3.1.0.tar.gz";
2148 2159 sha256 = "105g2li04nm1hb15a2y6hm9m9k7fbrkd5l3gy12w3kgcmsf3k25k";
2149 2160 };
2150 2161 meta = {
2151 2162 license = [ pkgs.lib.licenses.bsdOriginal ];
2152 2163 };
2153 2164 };
2154 2165 "subprocess32" = super.buildPythonPackage {
2155 2166 name = "subprocess32-3.5.4";
2156 2167 doCheck = false;
2157 2168 src = fetchurl {
2158 2169 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
2159 2170 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
2160 2171 };
2161 2172 meta = {
2162 2173 license = [ pkgs.lib.licenses.psfl ];
2163 2174 };
2164 2175 };
2165 2176 "supervisor" = super.buildPythonPackage {
2166 2177 name = "supervisor-4.1.0";
2167 2178 doCheck = false;
2168 2179 src = fetchurl {
2169 2180 url = "https://files.pythonhosted.org/packages/de/87/ee1ad8fa533a4b5f2c7623f4a2b585d3c1947af7bed8e65bc7772274320e/supervisor-4.1.0.tar.gz";
2170 2181 sha256 = "10q36sa1jqljyyyl7cif52akpygl5kmlqq9x91hmx53f8zh6zj1d";
2171 2182 };
2172 2183 meta = {
2173 2184 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2174 2185 };
2175 2186 };
2176 2187 "tempita" = super.buildPythonPackage {
2177 2188 name = "tempita-0.5.2";
2178 2189 doCheck = false;
2179 2190 src = fetchurl {
2180 2191 url = "https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz";
2181 2192 sha256 = "177wwq45slfyajd8csy477bmdmzipyw0dm7i85k3akb7m85wzkna";
2182 2193 };
2183 2194 meta = {
2184 2195 license = [ pkgs.lib.licenses.mit ];
2185 2196 };
2186 2197 };
2187 2198 "termcolor" = super.buildPythonPackage {
2188 2199 name = "termcolor-1.1.0";
2189 2200 doCheck = false;
2190 2201 src = fetchurl {
2191 2202 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
2192 2203 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
2193 2204 };
2194 2205 meta = {
2195 2206 license = [ pkgs.lib.licenses.mit ];
2196 2207 };
2197 2208 };
2198 2209 "testpath" = super.buildPythonPackage {
2199 2210 name = "testpath-0.4.4";
2200 2211 doCheck = false;
2201 2212 src = fetchurl {
2202 2213 url = "https://files.pythonhosted.org/packages/2c/b3/5d57205e896d8998d77ad12aa42ebce75cd97d8b9a97d00ba078c4c9ffeb/testpath-0.4.4.tar.gz";
2203 2214 sha256 = "0zpcmq22dz79ipvvsfnw1ykpjcaj6xyzy7ws77s5b5ql3hka7q30";
2204 2215 };
2205 2216 meta = {
2206 2217 license = [ ];
2207 2218 };
2208 2219 };
2209 2220 "traitlets" = super.buildPythonPackage {
2210 2221 name = "traitlets-4.3.3";
2211 2222 doCheck = false;
2212 2223 propagatedBuildInputs = [
2213 2224 self."ipython-genutils"
2214 2225 self."six"
2215 2226 self."decorator"
2216 2227 self."enum34"
2217 2228 ];
2218 2229 src = fetchurl {
2219 2230 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
2220 2231 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
2221 2232 };
2222 2233 meta = {
2223 2234 license = [ pkgs.lib.licenses.bsdOriginal ];
2224 2235 };
2225 2236 };
2226 2237 "transaction" = super.buildPythonPackage {
2227 2238 name = "transaction-2.4.0";
2228 2239 doCheck = false;
2229 2240 propagatedBuildInputs = [
2230 2241 self."zope.interface"
2231 2242 ];
2232 2243 src = fetchurl {
2233 2244 url = "https://files.pythonhosted.org/packages/9d/7d/0e8af0d059e052b9dcf2bb5a08aad20ae3e238746bdd3f8701a60969b363/transaction-2.4.0.tar.gz";
2234 2245 sha256 = "17wz1y524ca07vr03yddy8dv0gbscs06dbdywmllxv5rc725jq3j";
2235 2246 };
2236 2247 meta = {
2237 2248 license = [ pkgs.lib.licenses.zpl21 ];
2238 2249 };
2239 2250 };
2240 2251 "translationstring" = super.buildPythonPackage {
2241 2252 name = "translationstring-1.3";
2242 2253 doCheck = false;
2243 2254 src = fetchurl {
2244 2255 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
2245 2256 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
2246 2257 };
2247 2258 meta = {
2248 2259 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
2249 2260 };
2250 2261 };
2251 2262 "tzlocal" = super.buildPythonPackage {
2252 2263 name = "tzlocal-1.5.1";
2253 2264 doCheck = false;
2254 2265 propagatedBuildInputs = [
2255 2266 self."pytz"
2256 2267 ];
2257 2268 src = fetchurl {
2258 2269 url = "https://files.pythonhosted.org/packages/cb/89/e3687d3ed99bc882793f82634e9824e62499fdfdc4b1ae39e211c5b05017/tzlocal-1.5.1.tar.gz";
2259 2270 sha256 = "0kiciwiqx0bv0fbc913idxibc4ygg4cb7f8rcpd9ij2shi4bigjf";
2260 2271 };
2261 2272 meta = {
2262 2273 license = [ pkgs.lib.licenses.mit ];
2263 2274 };
2264 2275 };
2265 2276 "urllib3" = super.buildPythonPackage {
2266 2277 name = "urllib3-1.25.2";
2267 2278 doCheck = false;
2268 2279 src = fetchurl {
2269 2280 url = "https://files.pythonhosted.org/packages/9a/8b/ea6d2beb2da6e331e9857d0a60b79ed4f72dcbc4e2c7f2d2521b0480fda2/urllib3-1.25.2.tar.gz";
2270 2281 sha256 = "1nq2k4pss1ihsjh02r41sqpjpm5rfqkjfysyq7g7n2i1p7c66c55";
2271 2282 };
2272 2283 meta = {
2273 2284 license = [ pkgs.lib.licenses.mit ];
2274 2285 };
2275 2286 };
2276 2287 "urlobject" = super.buildPythonPackage {
2277 2288 name = "urlobject-2.4.3";
2278 2289 doCheck = false;
2279 2290 src = fetchurl {
2280 2291 url = "https://files.pythonhosted.org/packages/e2/b8/1d0a916f4b34c4618846e6da0e4eeaa8fcb4a2f39e006434fe38acb74b34/URLObject-2.4.3.tar.gz";
2281 2292 sha256 = "1ahc8ficzfvr2avln71immfh4ls0zyv6cdaa5xmkdj5rd87f5cj7";
2282 2293 };
2283 2294 meta = {
2284 2295 license = [ pkgs.lib.licenses.publicDomain ];
2285 2296 };
2286 2297 };
2287 2298 "venusian" = super.buildPythonPackage {
2288 2299 name = "venusian-1.2.0";
2289 2300 doCheck = false;
2290 2301 src = fetchurl {
2291 2302 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
2292 2303 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
2293 2304 };
2294 2305 meta = {
2295 2306 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
2296 2307 };
2297 2308 };
2298 2309 "vine" = super.buildPythonPackage {
2299 2310 name = "vine-1.3.0";
2300 2311 doCheck = false;
2301 2312 src = fetchurl {
2302 2313 url = "https://files.pythonhosted.org/packages/1c/e1/79fb8046e607dd6c2ad05c9b8ebac9d0bd31d086a08f02699e96fc5b3046/vine-1.3.0.tar.gz";
2303 2314 sha256 = "11ydsbhl1vabndc2r979dv61s6j2b0giq6dgvryifvq1m7bycghk";
2304 2315 };
2305 2316 meta = {
2306 2317 license = [ pkgs.lib.licenses.bsdOriginal ];
2307 2318 };
2308 2319 };
2309 2320 "waitress" = super.buildPythonPackage {
2310 2321 name = "waitress-1.3.1";
2311 2322 doCheck = false;
2312 2323 src = fetchurl {
2313 2324 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
2314 2325 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
2315 2326 };
2316 2327 meta = {
2317 2328 license = [ pkgs.lib.licenses.zpl21 ];
2318 2329 };
2319 2330 };
2320 2331 "wcwidth" = super.buildPythonPackage {
2321 2332 name = "wcwidth-0.1.9";
2322 2333 doCheck = false;
2323 2334 src = fetchurl {
2324 2335 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
2325 2336 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
2326 2337 };
2327 2338 meta = {
2328 2339 license = [ pkgs.lib.licenses.mit ];
2329 2340 };
2330 2341 };
2331 2342 "webencodings" = super.buildPythonPackage {
2332 2343 name = "webencodings-0.5.1";
2333 2344 doCheck = false;
2334 2345 src = fetchurl {
2335 2346 url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz";
2336 2347 sha256 = "08qrgrc4hrximb2gqnl69g01s93rhf2842jfxdjljc1dbwj1qsmk";
2337 2348 };
2338 2349 meta = {
2339 2350 license = [ pkgs.lib.licenses.bsdOriginal ];
2340 2351 };
2341 2352 };
2342 2353 "weberror" = super.buildPythonPackage {
2343 2354 name = "weberror-0.13.1";
2344 2355 doCheck = false;
2345 2356 propagatedBuildInputs = [
2346 2357 self."webob"
2347 2358 self."tempita"
2348 2359 self."pygments"
2349 2360 self."paste"
2350 2361 ];
2351 2362 src = fetchurl {
2352 2363 url = "https://files.pythonhosted.org/packages/07/0a/09ca5eb0fab5c0d17b380026babe81c96ecebb13f2b06c3203432dd7be72/WebError-0.13.1.tar.gz";
2353 2364 sha256 = "0r4qvnf2r92gfnpa1kwygh4j2x6j3axg2i4an6hyxwg2gpaqp7y1";
2354 2365 };
2355 2366 meta = {
2356 2367 license = [ pkgs.lib.licenses.mit ];
2357 2368 };
2358 2369 };
2359 2370 "webhelpers2" = super.buildPythonPackage {
2360 2371 name = "webhelpers2-2.0";
2361 2372 doCheck = false;
2362 2373 propagatedBuildInputs = [
2363 2374 self."markupsafe"
2364 2375 self."six"
2365 2376 ];
2366 2377 src = fetchurl {
2367 2378 url = "https://files.pythonhosted.org/packages/ff/30/56342c6ea522439e3662427c8d7b5e5b390dff4ff2dc92d8afcb8ab68b75/WebHelpers2-2.0.tar.gz";
2368 2379 sha256 = "0aphva1qmxh83n01p53f5fd43m4srzbnfbz5ajvbx9aj2aipwmcs";
2369 2380 };
2370 2381 meta = {
2371 2382 license = [ pkgs.lib.licenses.mit ];
2372 2383 };
2373 2384 };
2374 2385 "webob" = super.buildPythonPackage {
2375 2386 name = "webob-1.8.5";
2376 2387 doCheck = false;
2377 2388 src = fetchurl {
2378 2389 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
2379 2390 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
2380 2391 };
2381 2392 meta = {
2382 2393 license = [ pkgs.lib.licenses.mit ];
2383 2394 };
2384 2395 };
2385 2396 "webtest" = super.buildPythonPackage {
2386 2397 name = "webtest-2.0.34";
2387 2398 doCheck = false;
2388 2399 propagatedBuildInputs = [
2389 2400 self."six"
2390 2401 self."webob"
2391 2402 self."waitress"
2392 2403 self."beautifulsoup4"
2393 2404 ];
2394 2405 src = fetchurl {
2395 2406 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
2396 2407 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
2397 2408 };
2398 2409 meta = {
2399 2410 license = [ pkgs.lib.licenses.mit ];
2400 2411 };
2401 2412 };
2402 2413 "whoosh" = super.buildPythonPackage {
2403 2414 name = "whoosh-2.7.4";
2404 2415 doCheck = false;
2405 2416 src = fetchurl {
2406 2417 url = "https://files.pythonhosted.org/packages/25/2b/6beed2107b148edc1321da0d489afc4617b9ed317ef7b72d4993cad9b684/Whoosh-2.7.4.tar.gz";
2407 2418 sha256 = "10qsqdjpbc85fykc1vgcs8xwbgn4l2l52c8d83xf1q59pwyn79bw";
2408 2419 };
2409 2420 meta = {
2410 2421 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd2 ];
2411 2422 };
2412 2423 };
2413 2424 "ws4py" = super.buildPythonPackage {
2414 2425 name = "ws4py-0.5.1";
2415 2426 doCheck = false;
2416 2427 src = fetchurl {
2417 2428 url = "https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz";
2418 2429 sha256 = "10slbbf2jm4hpr92jx7kh7mhf48sjl01v2w4d8z3f1p0ybbp7l19";
2419 2430 };
2420 2431 meta = {
2421 2432 license = [ pkgs.lib.licenses.bsdOriginal ];
2422 2433 };
2423 2434 };
2424 2435 "wsgiref" = super.buildPythonPackage {
2425 2436 name = "wsgiref-0.1.2";
2426 2437 doCheck = false;
2427 2438 src = fetchurl {
2428 2439 url = "https://files.pythonhosted.org/packages/41/9e/309259ce8dff8c596e8c26df86dbc4e848b9249fd36797fd60be456f03fc/wsgiref-0.1.2.zip";
2429 2440 sha256 = "0y8fyjmpq7vwwm4x732w97qbkw78rjwal5409k04cw4m03411rn7";
2430 2441 };
2431 2442 meta = {
2432 2443 license = [ { fullName = "PSF or ZPL"; } ];
2433 2444 };
2434 2445 };
2435 2446 "zipp" = super.buildPythonPackage {
2436 2447 name = "zipp-1.2.0";
2437 2448 doCheck = false;
2438 2449 propagatedBuildInputs = [
2439 2450 self."contextlib2"
2440 2451 ];
2441 2452 src = fetchurl {
2442 2453 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
2443 2454 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
2444 2455 };
2445 2456 meta = {
2446 2457 license = [ pkgs.lib.licenses.mit ];
2447 2458 };
2448 2459 };
2449 2460 "zope.cachedescriptors" = super.buildPythonPackage {
2450 2461 name = "zope.cachedescriptors-4.3.1";
2451 2462 doCheck = false;
2452 2463 propagatedBuildInputs = [
2453 2464 self."setuptools"
2454 2465 ];
2455 2466 src = fetchurl {
2456 2467 url = "https://files.pythonhosted.org/packages/2f/89/ebe1890cc6d3291ebc935558fa764d5fffe571018dbbee200e9db78762cb/zope.cachedescriptors-4.3.1.tar.gz";
2457 2468 sha256 = "0jhr3m5p74c6r7k8iv0005b8bfsialih9d7zl5vx38rf5xq1lk8z";
2458 2469 };
2459 2470 meta = {
2460 2471 license = [ pkgs.lib.licenses.zpl21 ];
2461 2472 };
2462 2473 };
2463 2474 "zope.deprecation" = super.buildPythonPackage {
2464 2475 name = "zope.deprecation-4.4.0";
2465 2476 doCheck = false;
2466 2477 propagatedBuildInputs = [
2467 2478 self."setuptools"
2468 2479 ];
2469 2480 src = fetchurl {
2470 2481 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
2471 2482 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
2472 2483 };
2473 2484 meta = {
2474 2485 license = [ pkgs.lib.licenses.zpl21 ];
2475 2486 };
2476 2487 };
2477 2488 "zope.event" = super.buildPythonPackage {
2478 2489 name = "zope.event-4.4";
2479 2490 doCheck = false;
2480 2491 propagatedBuildInputs = [
2481 2492 self."setuptools"
2482 2493 ];
2483 2494 src = fetchurl {
2484 2495 url = "https://files.pythonhosted.org/packages/4c/b2/51c0369adcf5be2334280eed230192ab3b03f81f8efda9ddea6f65cc7b32/zope.event-4.4.tar.gz";
2485 2496 sha256 = "1ksbc726av9xacml6jhcfyn828hlhb9xlddpx6fcvnlvmpmpvhk9";
2486 2497 };
2487 2498 meta = {
2488 2499 license = [ pkgs.lib.licenses.zpl21 ];
2489 2500 };
2490 2501 };
2491 2502 "zope.interface" = super.buildPythonPackage {
2492 2503 name = "zope.interface-4.6.0";
2493 2504 doCheck = false;
2494 2505 propagatedBuildInputs = [
2495 2506 self."setuptools"
2496 2507 ];
2497 2508 src = fetchurl {
2498 2509 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
2499 2510 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
2500 2511 };
2501 2512 meta = {
2502 2513 license = [ pkgs.lib.licenses.zpl21 ];
2503 2514 };
2504 2515 };
2505 2516
2506 2517 ### Test requirements
2507 2518
2508 2519
2509 2520 }
@@ -1,1 +1,1 b''
1 4.24.1 No newline at end of file
1 4.25.0 No newline at end of file
@@ -1,2524 +1,2524 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import time
23 23
24 24 import rhodecode
25 25 from rhodecode.api import (
26 26 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
27 27 from rhodecode.api.utils import (
28 28 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
29 29 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
30 30 get_perm_or_error, parse_args, get_origin, build_commit_data,
31 31 validate_set_owner_permissions)
32 32 from rhodecode.lib import audit_logger, rc_cache, channelstream
33 33 from rhodecode.lib import repo_maintenance
34 34 from rhodecode.lib.auth import (
35 35 HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
36 36 HasRepoPermissionAnyApi)
37 37 from rhodecode.lib.celerylib.utils import get_task_id
38 38 from rhodecode.lib.utils2 import (
39 39 str2bool, time_to_datetime, safe_str, safe_int, safe_unicode)
40 40 from rhodecode.lib.ext_json import json
41 41 from rhodecode.lib.exceptions import (
42 42 StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
43 43 from rhodecode.lib.vcs import RepositoryError
44 44 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
45 45 from rhodecode.model.changeset_status import ChangesetStatusModel
46 46 from rhodecode.model.comment import CommentsModel
47 47 from rhodecode.model.db import (
48 48 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
49 49 ChangesetComment)
50 50 from rhodecode.model.permission import PermissionModel
51 51 from rhodecode.model.pull_request import PullRequestModel
52 52 from rhodecode.model.repo import RepoModel
53 53 from rhodecode.model.scm import ScmModel, RepoList
54 54 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
55 55 from rhodecode.model import validation_schema
56 56 from rhodecode.model.validation_schema.schemas import repo_schema
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60
61 61 @jsonrpc_method()
62 62 def get_repo(request, apiuser, repoid, cache=Optional(True)):
63 63 """
64 64 Gets an existing repository by its name or repository_id.
65 65
66 66 The members section so the output returns users groups or users
67 67 associated with that repository.
68 68
69 69 This command can only be run using an |authtoken| with admin rights,
70 70 or users with at least read rights to the |repo|.
71 71
72 72 :param apiuser: This is filled automatically from the |authtoken|.
73 73 :type apiuser: AuthUser
74 74 :param repoid: The repository name or repository id.
75 75 :type repoid: str or int
76 76 :param cache: use the cached value for last changeset
77 77 :type: cache: Optional(bool)
78 78
79 79 Example output:
80 80
81 81 .. code-block:: bash
82 82
83 83 {
84 84 "error": null,
85 85 "id": <repo_id>,
86 86 "result": {
87 87 "clone_uri": null,
88 88 "created_on": "timestamp",
89 89 "description": "repo description",
90 90 "enable_downloads": false,
91 91 "enable_locking": false,
92 92 "enable_statistics": false,
93 93 "followers": [
94 94 {
95 95 "active": true,
96 96 "admin": false,
97 97 "api_key": "****************************************",
98 98 "api_keys": [
99 99 "****************************************"
100 100 ],
101 101 "email": "user@example.com",
102 102 "emails": [
103 103 "user@example.com"
104 104 ],
105 105 "extern_name": "rhodecode",
106 106 "extern_type": "rhodecode",
107 107 "firstname": "username",
108 108 "ip_addresses": [],
109 109 "language": null,
110 110 "last_login": "2015-09-16T17:16:35.854",
111 111 "lastname": "surname",
112 112 "user_id": <user_id>,
113 113 "username": "name"
114 114 }
115 115 ],
116 116 "fork_of": "parent-repo",
117 117 "landing_rev": [
118 118 "rev",
119 119 "tip"
120 120 ],
121 121 "last_changeset": {
122 122 "author": "User <user@example.com>",
123 123 "branch": "default",
124 124 "date": "timestamp",
125 125 "message": "last commit message",
126 126 "parents": [
127 127 {
128 128 "raw_id": "commit-id"
129 129 }
130 130 ],
131 131 "raw_id": "commit-id",
132 132 "revision": <revision number>,
133 133 "short_id": "short id"
134 134 },
135 135 "lock_reason": null,
136 136 "locked_by": null,
137 137 "locked_date": null,
138 138 "owner": "owner-name",
139 139 "permissions": [
140 140 {
141 141 "name": "super-admin-name",
142 142 "origin": "super-admin",
143 143 "permission": "repository.admin",
144 144 "type": "user"
145 145 },
146 146 {
147 147 "name": "owner-name",
148 148 "origin": "owner",
149 149 "permission": "repository.admin",
150 150 "type": "user"
151 151 },
152 152 {
153 153 "name": "user-group-name",
154 154 "origin": "permission",
155 155 "permission": "repository.write",
156 156 "type": "user_group"
157 157 }
158 158 ],
159 159 "private": true,
160 160 "repo_id": 676,
161 161 "repo_name": "user-group/repo-name",
162 162 "repo_type": "hg"
163 163 }
164 164 }
165 165 """
166 166
167 167 repo = get_repo_or_error(repoid)
168 168 cache = Optional.extract(cache)
169 169
170 170 include_secrets = False
171 171 if has_superadmin_permission(apiuser):
172 172 include_secrets = True
173 173 else:
174 174 # check if we have at least read permission for this repo !
175 175 _perms = (
176 176 'repository.admin', 'repository.write', 'repository.read',)
177 177 validate_repo_permissions(apiuser, repoid, repo, _perms)
178 178
179 179 permissions = []
180 180 for _user in repo.permissions():
181 181 user_data = {
182 182 'name': _user.username,
183 183 'permission': _user.permission,
184 184 'origin': get_origin(_user),
185 185 'type': "user",
186 186 }
187 187 permissions.append(user_data)
188 188
189 189 for _user_group in repo.permission_user_groups():
190 190 user_group_data = {
191 191 'name': _user_group.users_group_name,
192 192 'permission': _user_group.permission,
193 193 'origin': get_origin(_user_group),
194 194 'type': "user_group",
195 195 }
196 196 permissions.append(user_group_data)
197 197
198 198 following_users = [
199 199 user.user.get_api_data(include_secrets=include_secrets)
200 200 for user in repo.followers]
201 201
202 202 if not cache:
203 203 repo.update_commit_cache()
204 204 data = repo.get_api_data(include_secrets=include_secrets)
205 205 data['permissions'] = permissions
206 206 data['followers'] = following_users
207 207 return data
208 208
209 209
210 210 @jsonrpc_method()
211 211 def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
212 212 """
213 213 Lists all existing repositories.
214 214
215 215 This command can only be run using an |authtoken| with admin rights,
216 216 or users with at least read rights to |repos|.
217 217
218 218 :param apiuser: This is filled automatically from the |authtoken|.
219 219 :type apiuser: AuthUser
220 220 :param root: specify root repository group to fetch repositories.
221 221 filters the returned repositories to be members of given root group.
222 222 :type root: Optional(None)
223 223 :param traverse: traverse given root into subrepositories. With this flag
224 224 set to False, it will only return top-level repositories from `root`.
225 225 if root is empty it will return just top-level repositories.
226 226 :type traverse: Optional(True)
227 227
228 228
229 229 Example output:
230 230
231 231 .. code-block:: bash
232 232
233 233 id : <id_given_in_input>
234 234 result: [
235 235 {
236 236 "repo_id" : "<repo_id>",
237 237 "repo_name" : "<reponame>"
238 238 "repo_type" : "<repo_type>",
239 239 "clone_uri" : "<clone_uri>",
240 240 "private": : "<bool>",
241 241 "created_on" : "<datetimecreated>",
242 242 "description" : "<description>",
243 243 "landing_rev": "<landing_rev>",
244 244 "owner": "<repo_owner>",
245 245 "fork_of": "<name_of_fork_parent>",
246 246 "enable_downloads": "<bool>",
247 247 "enable_locking": "<bool>",
248 248 "enable_statistics": "<bool>",
249 249 },
250 250 ...
251 251 ]
252 252 error: null
253 253 """
254 254
255 255 include_secrets = has_superadmin_permission(apiuser)
256 256 _perms = ('repository.read', 'repository.write', 'repository.admin',)
257 257 extras = {'user': apiuser}
258 258
259 259 root = Optional.extract(root)
260 260 traverse = Optional.extract(traverse, binary=True)
261 261
262 262 if root:
263 263 # verify parent existance, if it's empty return an error
264 264 parent = RepoGroup.get_by_group_name(root)
265 265 if not parent:
266 266 raise JSONRPCError(
267 267 'Root repository group `{}` does not exist'.format(root))
268 268
269 269 if traverse:
270 270 repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
271 271 else:
272 272 repos = RepoModel().get_repos_for_root(root=parent)
273 273 else:
274 274 if traverse:
275 275 repos = RepoModel().get_all()
276 276 else:
277 277 # return just top-level
278 278 repos = RepoModel().get_repos_for_root(root=None)
279 279
280 280 repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
281 281 return [repo.get_api_data(include_secrets=include_secrets)
282 282 for repo in repo_list]
283 283
284 284
285 285 @jsonrpc_method()
286 286 def get_repo_changeset(request, apiuser, repoid, revision,
287 287 details=Optional('basic')):
288 288 """
289 289 Returns information about a changeset.
290 290
291 291 Additionally parameters define the amount of details returned by
292 292 this function.
293 293
294 294 This command can only be run using an |authtoken| with admin rights,
295 295 or users with at least read rights to the |repo|.
296 296
297 297 :param apiuser: This is filled automatically from the |authtoken|.
298 298 :type apiuser: AuthUser
299 299 :param repoid: The repository name or repository id
300 300 :type repoid: str or int
301 301 :param revision: revision for which listing should be done
302 302 :type revision: str
303 303 :param details: details can be 'basic|extended|full' full gives diff
304 304 info details like the diff itself, and number of changed files etc.
305 305 :type details: Optional(str)
306 306
307 307 """
308 308 repo = get_repo_or_error(repoid)
309 309 if not has_superadmin_permission(apiuser):
310 310 _perms = ('repository.admin', 'repository.write', 'repository.read',)
311 311 validate_repo_permissions(apiuser, repoid, repo, _perms)
312 312
313 313 changes_details = Optional.extract(details)
314 314 _changes_details_types = ['basic', 'extended', 'full']
315 315 if changes_details not in _changes_details_types:
316 316 raise JSONRPCError(
317 317 'ret_type must be one of %s' % (
318 318 ','.join(_changes_details_types)))
319 319
320 320 vcs_repo = repo.scm_instance()
321 321 pre_load = ['author', 'branch', 'date', 'message', 'parents',
322 322 'status', '_commit', '_file_paths']
323 323
324 324 try:
325 325 commit = repo.get_commit(commit_id=revision, pre_load=pre_load)
326 326 except TypeError as e:
327 327 raise JSONRPCError(safe_str(e))
328 328 _cs_json = commit.__json__()
329 329 _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details)
330 330 if changes_details == 'full':
331 331 _cs_json['refs'] = commit._get_refs()
332 332 return _cs_json
333 333
334 334
335 335 @jsonrpc_method()
336 336 def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
337 337 details=Optional('basic')):
338 338 """
339 339 Returns a set of commits limited by the number starting
340 340 from the `start_rev` option.
341 341
342 342 Additional parameters define the amount of details returned by this
343 343 function.
344 344
345 345 This command can only be run using an |authtoken| with admin rights,
346 346 or users with at least read rights to |repos|.
347 347
348 348 :param apiuser: This is filled automatically from the |authtoken|.
349 349 :type apiuser: AuthUser
350 350 :param repoid: The repository name or repository ID.
351 351 :type repoid: str or int
352 352 :param start_rev: The starting revision from where to get changesets.
353 353 :type start_rev: str
354 354 :param limit: Limit the number of commits to this amount
355 355 :type limit: str or int
356 356 :param details: Set the level of detail returned. Valid option are:
357 357 ``basic``, ``extended`` and ``full``.
358 358 :type details: Optional(str)
359 359
360 360 .. note::
361 361
362 362 Setting the parameter `details` to the value ``full`` is extensive
363 363 and returns details like the diff itself, and the number
364 364 of changed files.
365 365
366 366 """
367 367 repo = get_repo_or_error(repoid)
368 368 if not has_superadmin_permission(apiuser):
369 369 _perms = ('repository.admin', 'repository.write', 'repository.read',)
370 370 validate_repo_permissions(apiuser, repoid, repo, _perms)
371 371
372 372 changes_details = Optional.extract(details)
373 373 _changes_details_types = ['basic', 'extended', 'full']
374 374 if changes_details not in _changes_details_types:
375 375 raise JSONRPCError(
376 376 'ret_type must be one of %s' % (
377 377 ','.join(_changes_details_types)))
378 378
379 379 limit = int(limit)
380 380 pre_load = ['author', 'branch', 'date', 'message', 'parents',
381 381 'status', '_commit', '_file_paths']
382 382
383 383 vcs_repo = repo.scm_instance()
384 384 # SVN needs a special case to distinguish its index and commit id
385 385 if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
386 386 start_rev = vcs_repo.commit_ids[0]
387 387
388 388 try:
389 389 commits = vcs_repo.get_commits(
390 390 start_id=start_rev, pre_load=pre_load, translate_tags=False)
391 391 except TypeError as e:
392 392 raise JSONRPCError(safe_str(e))
393 393 except Exception:
394 394 log.exception('Fetching of commits failed')
395 395 raise JSONRPCError('Error occurred during commit fetching')
396 396
397 397 ret = []
398 398 for cnt, commit in enumerate(commits):
399 399 if cnt >= limit != -1:
400 400 break
401 401 _cs_json = commit.__json__()
402 402 _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details)
403 403 if changes_details == 'full':
404 404 _cs_json['refs'] = {
405 405 'branches': [commit.branch],
406 406 'bookmarks': getattr(commit, 'bookmarks', []),
407 407 'tags': commit.tags
408 408 }
409 409 ret.append(_cs_json)
410 410 return ret
411 411
412 412
413 413 @jsonrpc_method()
414 414 def get_repo_nodes(request, apiuser, repoid, revision, root_path,
415 415 ret_type=Optional('all'), details=Optional('basic'),
416 416 max_file_bytes=Optional(None)):
417 417 """
418 418 Returns a list of nodes and children in a flat list for a given
419 419 path at given revision.
420 420
421 421 It's possible to specify ret_type to show only `files` or `dirs`.
422 422
423 423 This command can only be run using an |authtoken| with admin rights,
424 424 or users with at least read rights to |repos|.
425 425
426 426 :param apiuser: This is filled automatically from the |authtoken|.
427 427 :type apiuser: AuthUser
428 428 :param repoid: The repository name or repository ID.
429 429 :type repoid: str or int
430 430 :param revision: The revision for which listing should be done.
431 431 :type revision: str
432 432 :param root_path: The path from which to start displaying.
433 433 :type root_path: str
434 434 :param ret_type: Set the return type. Valid options are
435 435 ``all`` (default), ``files`` and ``dirs``.
436 436 :type ret_type: Optional(str)
437 437 :param details: Returns extended information about nodes, such as
438 438 md5, binary, and or content.
439 439 The valid options are ``basic`` and ``full``.
440 440 :type details: Optional(str)
441 441 :param max_file_bytes: Only return file content under this file size bytes
442 442 :type details: Optional(int)
443 443
444 444 Example output:
445 445
446 446 .. code-block:: bash
447 447
448 448 id : <id_given_in_input>
449 449 result: [
450 450 {
451 451 "binary": false,
452 452 "content": "File line",
453 453 "extension": "md",
454 454 "lines": 2,
455 455 "md5": "059fa5d29b19c0657e384749480f6422",
456 456 "mimetype": "text/x-minidsrc",
457 457 "name": "file.md",
458 458 "size": 580,
459 459 "type": "file"
460 460 },
461 461 ...
462 462 ]
463 463 error: null
464 464 """
465 465
466 466 repo = get_repo_or_error(repoid)
467 467 if not has_superadmin_permission(apiuser):
468 468 _perms = ('repository.admin', 'repository.write', 'repository.read',)
469 469 validate_repo_permissions(apiuser, repoid, repo, _perms)
470 470
471 471 ret_type = Optional.extract(ret_type)
472 472 details = Optional.extract(details)
473 473 _extended_types = ['basic', 'full']
474 474 if details not in _extended_types:
475 475 raise JSONRPCError('ret_type must be one of %s' % (','.join(_extended_types)))
476 476 extended_info = False
477 477 content = False
478 478 if details == 'basic':
479 479 extended_info = True
480 480
481 481 if details == 'full':
482 482 extended_info = content = True
483 483
484 484 _map = {}
485 485 try:
486 486 # check if repo is not empty by any chance, skip quicker if it is.
487 487 _scm = repo.scm_instance()
488 488 if _scm.is_empty():
489 489 return []
490 490
491 491 _d, _f = ScmModel().get_nodes(
492 492 repo, revision, root_path, flat=False,
493 493 extended_info=extended_info, content=content,
494 494 max_file_bytes=max_file_bytes)
495 495 _map = {
496 496 'all': _d + _f,
497 497 'files': _f,
498 498 'dirs': _d,
499 499 }
500 500 return _map[ret_type]
501 501 except KeyError:
502 502 raise JSONRPCError(
503 503 'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
504 504 except Exception:
505 505 log.exception("Exception occurred while trying to get repo nodes")
506 506 raise JSONRPCError(
507 507 'failed to get repo: `%s` nodes' % repo.repo_name
508 508 )
509 509
510 510
511 511 @jsonrpc_method()
512 512 def get_repo_file(request, apiuser, repoid, commit_id, file_path,
513 513 max_file_bytes=Optional(None), details=Optional('basic'),
514 514 cache=Optional(True)):
515 515 """
516 516 Returns a single file from repository at given revision.
517 517
518 518 This command can only be run using an |authtoken| with admin rights,
519 519 or users with at least read rights to |repos|.
520 520
521 521 :param apiuser: This is filled automatically from the |authtoken|.
522 522 :type apiuser: AuthUser
523 523 :param repoid: The repository name or repository ID.
524 524 :type repoid: str or int
525 525 :param commit_id: The revision for which listing should be done.
526 526 :type commit_id: str
527 527 :param file_path: The path from which to start displaying.
528 528 :type file_path: str
529 529 :param details: Returns different set of information about nodes.
530 530 The valid options are ``minimal`` ``basic`` and ``full``.
531 531 :type details: Optional(str)
532 532 :param max_file_bytes: Only return file content under this file size bytes
533 533 :type max_file_bytes: Optional(int)
534 534 :param cache: Use internal caches for fetching files. If disabled fetching
535 535 files is slower but more memory efficient
536 536 :type cache: Optional(bool)
537 537
538 538 Example output:
539 539
540 540 .. code-block:: bash
541 541
542 542 id : <id_given_in_input>
543 543 result: {
544 544 "binary": false,
545 545 "extension": "py",
546 546 "lines": 35,
547 547 "content": "....",
548 548 "md5": "76318336366b0f17ee249e11b0c99c41",
549 549 "mimetype": "text/x-python",
550 550 "name": "python.py",
551 551 "size": 817,
552 552 "type": "file",
553 553 }
554 554 error: null
555 555 """
556 556
557 557 repo = get_repo_or_error(repoid)
558 558 if not has_superadmin_permission(apiuser):
559 559 _perms = ('repository.admin', 'repository.write', 'repository.read',)
560 560 validate_repo_permissions(apiuser, repoid, repo, _perms)
561 561
562 562 cache = Optional.extract(cache, binary=True)
563 563 details = Optional.extract(details)
564 564 _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
565 565 if details not in _extended_types:
566 566 raise JSONRPCError(
567 567 'ret_type must be one of %s, got %s' % (','.join(_extended_types)), details)
568 568 extended_info = False
569 569 content = False
570 570
571 571 if details == 'minimal':
572 572 extended_info = False
573 573
574 574 elif details == 'basic':
575 575 extended_info = True
576 576
577 577 elif details == 'full':
578 578 extended_info = content = True
579 579
580 580 file_path = safe_unicode(file_path)
581 581 try:
582 582 # check if repo is not empty by any chance, skip quicker if it is.
583 583 _scm = repo.scm_instance()
584 584 if _scm.is_empty():
585 585 return None
586 586
587 587 node = ScmModel().get_node(
588 588 repo, commit_id, file_path, extended_info=extended_info,
589 589 content=content, max_file_bytes=max_file_bytes, cache=cache)
590 590 except NodeDoesNotExistError:
591 591 raise JSONRPCError(u'There is no file in repo: `{}` at path `{}` for commit: `{}`'.format(
592 592 repo.repo_name, file_path, commit_id))
593 593 except Exception:
594 594 log.exception(u"Exception occurred while trying to get repo %s file",
595 595 repo.repo_name)
596 596 raise JSONRPCError(u'failed to get repo: `{}` file at path {}'.format(
597 597 repo.repo_name, file_path))
598 598
599 599 return node
600 600
601 601
602 602 @jsonrpc_method()
603 603 def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
604 604 """
605 605 Returns a list of tree nodes for path at given revision. This api is built
606 606 strictly for usage in full text search building, and shouldn't be consumed
607 607
608 608 This command can only be run using an |authtoken| with admin rights,
609 609 or users with at least read rights to |repos|.
610 610
611 611 """
612 612
613 613 repo = get_repo_or_error(repoid)
614 614 if not has_superadmin_permission(apiuser):
615 615 _perms = ('repository.admin', 'repository.write', 'repository.read',)
616 616 validate_repo_permissions(apiuser, repoid, repo, _perms)
617 617
618 618 repo_id = repo.repo_id
619 619 cache_seconds = safe_int(rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
620 620 cache_on = cache_seconds > 0
621 621
622 622 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
623 623 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
624 624
625 625 def compute_fts_tree(cache_ver, repo_id, commit_id, root_path):
626 626 return ScmModel().get_fts_data(repo_id, commit_id, root_path)
627 627
628 628 try:
629 629 # check if repo is not empty by any chance, skip quicker if it is.
630 630 _scm = repo.scm_instance()
631 631 if _scm.is_empty():
632 632 return []
633 633 except RepositoryError:
634 634 log.exception("Exception occurred while trying to get repo nodes")
635 635 raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
636 636
637 637 try:
638 638 # we need to resolve commit_id to a FULL sha for cache to work correctly.
639 639 # sending 'master' is a pointer that needs to be translated to current commit.
640 640 commit_id = _scm.get_commit(commit_id=commit_id).raw_id
641 641 log.debug(
642 642 'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
643 643 'with caching: %s[TTL: %ss]' % (
644 644 repo_id, commit_id, cache_on, cache_seconds or 0))
645 645
646 646 tree_files = compute_fts_tree(rc_cache.FILE_TREE_CACHE_VER, repo_id, commit_id, root_path)
647 647 return tree_files
648 648
649 649 except Exception:
650 650 log.exception("Exception occurred while trying to get repo nodes")
651 651 raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)
652 652
653 653
654 654 @jsonrpc_method()
655 655 def get_repo_refs(request, apiuser, repoid):
656 656 """
657 657 Returns a dictionary of current references. It returns
658 658 bookmarks, branches, closed_branches, and tags for given repository
659 659
660 660 It's possible to specify ret_type to show only `files` or `dirs`.
661 661
662 662 This command can only be run using an |authtoken| with admin rights,
663 663 or users with at least read rights to |repos|.
664 664
665 665 :param apiuser: This is filled automatically from the |authtoken|.
666 666 :type apiuser: AuthUser
667 667 :param repoid: The repository name or repository ID.
668 668 :type repoid: str or int
669 669
670 670 Example output:
671 671
672 672 .. code-block:: bash
673 673
674 674 id : <id_given_in_input>
675 675 "result": {
676 676 "bookmarks": {
677 677 "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
678 678 "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
679 679 },
680 680 "branches": {
681 681 "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
682 682 "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
683 683 },
684 684 "branches_closed": {},
685 685 "tags": {
686 686 "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
687 687 "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
688 688 "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
689 689 "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
690 690 }
691 691 }
692 692 error: null
693 693 """
694 694
695 695 repo = get_repo_or_error(repoid)
696 696 if not has_superadmin_permission(apiuser):
697 697 _perms = ('repository.admin', 'repository.write', 'repository.read',)
698 698 validate_repo_permissions(apiuser, repoid, repo, _perms)
699 699
700 700 try:
701 701 # check if repo is not empty by any chance, skip quicker if it is.
702 702 vcs_instance = repo.scm_instance()
703 703 refs = vcs_instance.refs()
704 704 return refs
705 705 except Exception:
706 706 log.exception("Exception occurred while trying to get repo refs")
707 707 raise JSONRPCError(
708 708 'failed to get repo: `%s` references' % repo.repo_name
709 709 )
710 710
711 711
712 712 @jsonrpc_method()
713 713 def create_repo(
714 714 request, apiuser, repo_name, repo_type,
715 715 owner=Optional(OAttr('apiuser')),
716 716 description=Optional(''),
717 717 private=Optional(False),
718 718 clone_uri=Optional(None),
719 719 push_uri=Optional(None),
720 720 landing_rev=Optional(None),
721 721 enable_statistics=Optional(False),
722 722 enable_locking=Optional(False),
723 723 enable_downloads=Optional(False),
724 724 copy_permissions=Optional(False)):
725 725 """
726 726 Creates a repository.
727 727
728 728 * If the repository name contains "/", repository will be created inside
729 729 a repository group or nested repository groups
730 730
731 731 For example "foo/bar/repo1" will create |repo| called "repo1" inside
732 732 group "foo/bar". You have to have permissions to access and write to
733 733 the last repository group ("bar" in this example)
734 734
735 735 This command can only be run using an |authtoken| with at least
736 736 permissions to create repositories, or write permissions to
737 737 parent repository groups.
738 738
739 739 :param apiuser: This is filled automatically from the |authtoken|.
740 740 :type apiuser: AuthUser
741 741 :param repo_name: Set the repository name.
742 742 :type repo_name: str
743 743 :param repo_type: Set the repository type; 'hg','git', or 'svn'.
744 744 :type repo_type: str
745 745 :param owner: user_id or username
746 746 :type owner: Optional(str)
747 747 :param description: Set the repository description.
748 748 :type description: Optional(str)
749 749 :param private: set repository as private
750 750 :type private: bool
751 751 :param clone_uri: set clone_uri
752 752 :type clone_uri: str
753 753 :param push_uri: set push_uri
754 754 :type push_uri: str
755 755 :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd
756 756 :type landing_rev: str
757 757 :param enable_locking:
758 758 :type enable_locking: bool
759 759 :param enable_downloads:
760 760 :type enable_downloads: bool
761 761 :param enable_statistics:
762 762 :type enable_statistics: bool
763 763 :param copy_permissions: Copy permission from group in which the
764 764 repository is being created.
765 765 :type copy_permissions: bool
766 766
767 767
768 768 Example output:
769 769
770 770 .. code-block:: bash
771 771
772 772 id : <id_given_in_input>
773 773 result: {
774 774 "msg": "Created new repository `<reponame>`",
775 775 "success": true,
776 776 "task": "<celery task id or None if done sync>"
777 777 }
778 778 error: null
779 779
780 780
781 781 Example error output:
782 782
783 783 .. code-block:: bash
784 784
785 785 id : <id_given_in_input>
786 786 result : null
787 787 error : {
788 788 'failed to create repository `<repo_name>`'
789 789 }
790 790
791 791 """
792 792
793 793 owner = validate_set_owner_permissions(apiuser, owner)
794 794
795 795 description = Optional.extract(description)
796 796 copy_permissions = Optional.extract(copy_permissions)
797 797 clone_uri = Optional.extract(clone_uri)
798 798 push_uri = Optional.extract(push_uri)
799 799
800 800 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
801 801 if isinstance(private, Optional):
802 802 private = defs.get('repo_private') or Optional.extract(private)
803 803 if isinstance(repo_type, Optional):
804 804 repo_type = defs.get('repo_type')
805 805 if isinstance(enable_statistics, Optional):
806 806 enable_statistics = defs.get('repo_enable_statistics')
807 807 if isinstance(enable_locking, Optional):
808 808 enable_locking = defs.get('repo_enable_locking')
809 809 if isinstance(enable_downloads, Optional):
810 810 enable_downloads = defs.get('repo_enable_downloads')
811 811
812 812 landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
813 813 ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
814 814 ref_choices = list(set(ref_choices + [landing_ref]))
815 815
816 816 landing_commit_ref = Optional.extract(landing_rev) or landing_ref
817 817
818 818 schema = repo_schema.RepoSchema().bind(
819 819 repo_type_options=rhodecode.BACKENDS.keys(),
820 820 repo_ref_options=ref_choices,
821 821 repo_type=repo_type,
822 822 # user caller
823 823 user=apiuser)
824 824
825 825 try:
826 826 schema_data = schema.deserialize(dict(
827 827 repo_name=repo_name,
828 828 repo_type=repo_type,
829 829 repo_owner=owner.username,
830 830 repo_description=description,
831 831 repo_landing_commit_ref=landing_commit_ref,
832 832 repo_clone_uri=clone_uri,
833 833 repo_push_uri=push_uri,
834 834 repo_private=private,
835 835 repo_copy_permissions=copy_permissions,
836 836 repo_enable_statistics=enable_statistics,
837 837 repo_enable_downloads=enable_downloads,
838 838 repo_enable_locking=enable_locking))
839 839 except validation_schema.Invalid as err:
840 840 raise JSONRPCValidationError(colander_exc=err)
841 841
842 842 try:
843 843 data = {
844 844 'owner': owner,
845 845 'repo_name': schema_data['repo_group']['repo_name_without_group'],
846 846 'repo_name_full': schema_data['repo_name'],
847 847 'repo_group': schema_data['repo_group']['repo_group_id'],
848 848 'repo_type': schema_data['repo_type'],
849 849 'repo_description': schema_data['repo_description'],
850 850 'repo_private': schema_data['repo_private'],
851 851 'clone_uri': schema_data['repo_clone_uri'],
852 852 'push_uri': schema_data['repo_push_uri'],
853 853 'repo_landing_rev': schema_data['repo_landing_commit_ref'],
854 854 'enable_statistics': schema_data['repo_enable_statistics'],
855 855 'enable_locking': schema_data['repo_enable_locking'],
856 856 'enable_downloads': schema_data['repo_enable_downloads'],
857 857 'repo_copy_permissions': schema_data['repo_copy_permissions'],
858 858 }
859 859
860 860 task = RepoModel().create(form_data=data, cur_user=owner.user_id)
861 861 task_id = get_task_id(task)
862 862 # no commit, it's done in RepoModel, or async via celery
863 863 return {
864 864 'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
865 865 'success': True, # cannot return the repo data here since fork
866 866 # can be done async
867 867 'task': task_id
868 868 }
869 869 except Exception:
870 870 log.exception(
871 871 u"Exception while trying to create the repository %s",
872 872 schema_data['repo_name'])
873 873 raise JSONRPCError(
874 874 'failed to create repository `%s`' % (schema_data['repo_name'],))
875 875
876 876
@jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label: Human-readable label; falls back to the key when empty.
    :type label: Optional(str)
    :param description: Free-form description of the field.
    :type description: Optional(str)
    """
    repo = get_repo_or_error(repoid)

    # repository admins (or super-admins) only
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    field_label = Optional.extract(label) or key
    field_desc = Optional.extract(description)

    # field keys must be unique per repository
    if RepositoryField.get_by_key_name(key, repo):
        raise JSONRPCError('Field with key '
                           '`%s` exists for repo `%s`' % (key, repoid))

    try:
        RepoModel().add_repo_field(repo, key, field_label=field_label,
                                   field_desc=field_desc)
        Session().commit()
        return {
            'msg': "Added new repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            'failed to create new field for repository `%s`' % (repoid,))
922 922
923 923
@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
    """

    repo = get_repo_or_error(repoid)
    # repository admins (or super-admins) only
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    field = RepositoryField.get_by_key_name(key, repo)
    if not field:
        # fixed grammar of the error message ("does not exists" -> "does not exist")
        raise JSONRPCError('Field with key `%s` does not '
                           'exist for repo `%s`' % (key, repoid))

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
        return {
            'msg': "Deleted repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            'failed to delete field for repository `%s`' % (repoid,))
962 962
963 963
@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None), push_uri=Optional(None),
        landing_rev=Optional(None), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional('')):
    """
    Updates a repository with the given information.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", repository will be updated
      accordingly with a repository group or nested repository groups

      For example repoid=repo-test name="foo/bar/repo-test" will update |repo|
      called "repo-test" and place it inside group "foo/bar".
      You have to have permissions to access and write to the last repository
      group ("bar" in this example)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ', ' with \,
    :type fields: str
    """

    repo = get_repo_or_error(repoid)

    # secrets (e.g. clone credentials) are only echoed back to super-admins
    include_secrets = False
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)
    else:
        include_secrets = True

    # For every parameter the caller left unset (still an Optional wrapper),
    # fall back to the repository's current value so a partial update does
    # not clobber existing settings.
    updates = dict(
        repo_name=repo_name
        if not isinstance(repo_name, Optional) else repo.repo_name,

        fork_id=fork_of
        if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,

        user=owner
        if not isinstance(owner, Optional) else repo.user.username,

        repo_description=description
        if not isinstance(description, Optional) else repo.description,

        repo_private=private
        if not isinstance(private, Optional) else repo.private,

        clone_uri=clone_uri
        if not isinstance(clone_uri, Optional) else repo.clone_uri,

        push_uri=push_uri
        if not isinstance(push_uri, Optional) else repo.push_uri,

        repo_landing_rev=landing_rev
        if not isinstance(landing_rev, Optional) else repo._landing_revision,

        repo_enable_statistics=enable_statistics
        if not isinstance(enable_statistics, Optional) else repo.enable_statistics,

        repo_enable_locking=enable_locking
        if not isinstance(enable_locking, Optional) else repo.enable_locking,

        repo_enable_downloads=enable_downloads
        if not isinstance(enable_downloads, Optional) else repo.enable_downloads)

    # valid landing-ref choices: the repo's own refs plus the backend default
    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)
    ref_choices = list(set(ref_choices + [landing_ref]))

    # snapshot current data for the audit log before applying changes
    old_values = repo.get_api_data()
    repo_type = repo.repo_type
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save old value, users cannot change type
            repo_type=repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_push_uri=updates['push_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        push_uri=schema_data['repo_push_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    # resolve the fork target to its numeric id (raises if it does not exist)
    if schema_data['repo_fork_of']:
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields
    fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if fields:
        validated_updates.update(fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            u"Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)
1135 1135
1136 1136
@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional(None),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If the fork_name contains "/", fork will be created inside
      a repository group or nested repository groups

      For example "foo/bar/fork-repo" will create fork called "fork-repo"
      inside group "foo/bar". You have to have permissions to access and
      write to the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with minimum
    read permissions of the forked repo, create fork permissions for an user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including it's repository group membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision. E.g branch:default, book:dev, rev:abcd

    Example output:

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "fork_name": "<forkname>",
            "owner": "<username or user_id = Optional(=apiuser)>",
            "description": "<description>",
            "copy_permissions": "<bool>",
            "private": "<bool>",
            "landing_rev": "<landing_rev>"
        }

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Created fork of `<reponame>` as `<forkname>`",
            "success": true,
            "task": "<celery task id or None if done sync>"
        }
        error: null

    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name

    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for
        # this repo that we fork !
        _perms = ('repository.admin', 'repository.write', 'repository.read')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # check if the regular user has at least fork permissions as well
    if not HasPermissionAnyApi(PermissionModel.FORKING_ENABLED)(user=apiuser):
        raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)

    # valid landing-ref choices: known refs plus the backend's default;
    # the default is also used when the caller supplied no landing_rev
    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))
    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    private = Optional.extract(private)

    # validate the input through the colander schema; fork keeps the
    # parent's repo_type
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo.repo_type,
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=fork_name,
            repo_type=repo.repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'fork_parent_id': repo.repo_id,

            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'description': schema_data['repo_description'],
            'private': schema_data['repo_private'],
            'copy_permissions': schema_data['repo_copy_permissions'],
            'landing_rev': schema_data['repo_landing_commit_ref'],
        }

        task = RepoModel().create_fork(data, cur_user=owner.user_id)
        # no commit, it's done in RepoModel, or async via celery
        task_id = get_task_id(task)

        return {
            'msg': 'Created fork of `%s` as `%s`' % (
                repo.repo_name, schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create fork %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `%s` as `%s`' % (
                repo_name, schema_data['repo_name']))
1284 1284
1285 1285
@jsonrpc_method()
def delete_repo(request, apiuser, repoid, forks=Optional('')):
    """
    Deletes a repository.

    * When the `forks` parameter is set it's possible to detach or delete
      forks of deleted repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param forks: Set to `detach` or `delete` forks from the |repo|.
    :type forks: Optional(str)

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Deleted repository `<reponame>`",
            "success": true
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        handle_forks = Optional.extract(forks)
        _forks_msg = ''
        _forks = [f for f in repo.forks]
        if handle_forks == 'detach':
            _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
        elif handle_forks == 'delete':
            _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
        elif _forks:
            # attached forks present but no strategy given -> refuse deletion
            raise JSONRPCError(
                'Cannot delete `%s` it still contains attached forks' %
                (repo.repo_name,)
            )
        old_data = repo.get_api_data()
        # fix: pass the extracted value, not the raw argument which may still
        # be the Optional('') wrapper when the caller omitted `forks`
        RepoModel().delete(repo, forks=handle_forks)

        # the DB object is gone after delete; wrap name-only data for auditing
        repo = audit_logger.RepoWrap(repo_id=None,
                                     repo_name=repo.repo_name)

        audit_logger.store_api(
            'repo.delete', action_data={'old_data': old_data},
            user=apiuser, repo=repo)

        ScmModel().mark_for_invalidation(repo_name, delete=True)
        Session().commit()
        return {
            'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying to delete repo")
        raise JSONRPCError(
            'failed to delete repository `%s`' % (repo_name,)
        )
1356 1356
1357 1357
#TODO: marcink, change name ?
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            'msg': Cache for repository `<repository name>` was invalidated,
            'repository': <repository name>
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred during cache invalidation action'
        }

    """

    repo = get_repo_or_error(repoid)

    # write access is sufficient to flush caches
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(
            apiuser, repoid, repo, ('repository.admin', 'repository.write',))

    delete_invalidated = Optional.extract(delete_keys)
    try:
        ScmModel().mark_for_invalidation(
            repo.repo_name, delete=delete_invalidated)
        return {
            'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
            'repository': repo.repo_name
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )
1418 1418
1419 1419
#TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    From more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            'repo': '<reponame>',
            'locked': <bool: lock state>,
            'locked_since': <int: lock timestamp>,
            'locked_by': <username of person who made the lock>,
            'lock_reason': <str: reason for locking>,
            'lock_state_changed': <bool: True if lock state has been changed in this request>,
            'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
            or
            'msg': 'Repo `<repository name>` not locked.'
            or
            'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred locking repository `<reponame>`'
        }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # make sure normal user does not pass someone else userid,
    # he is not allowed to do that
    if not isinstance(userid, Optional) and userid != apiuser.user_id:
        raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    # no `locked` flag given: report the current lock state only
    if isinstance(locked, Optional):
        lockobj = repo.locked

        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            # fix: resolve the user who actually holds the lock (_user_id),
            # not the user the caller queried with (userid)
            lock_user = get_user_or_error(_user_id)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )
1552 1552
1553 1553
@jsonrpc_method()
def comment_commit(
        request, apiuser, repoid, commit_id, message, status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None), extra_recipients=Optional([]),
        userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
    """
    Set a commit comment, and optionally change the status of the commit.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Specify the commit_id for which to set a comment.
    :type commit_id: str
    :param message: The comment text.
    :type message: str
    :param status: (**Optional**) status of commit, one of: 'not_reviewed',
        'approved', 'rejected', 'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param resolves_comment_id: id of comment which this one will resolve
    :type resolves_comment_id: Optional(int)
    :param extra_recipients: list of user ids or usernames to add
        notifications for this comment. Acts like a CC for notification
    :type extra_recipients: Optional(list)
    :param userid: Set the user name of the comment creator.
    :type userid: Optional(str or int)
    :param send_email: Define if this comment should also send email notification
    :type send_email: Optional(bool)

    Example error output:

    .. code-block:: bash

        {
            "id" : <id_given_in_input>,
            "result" : {
                "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
                "status_change": null or <status>,
                "success": true
            },
            "error" :  null
        }

    """
    _ = request.translate

    repo = get_repo_or_error(repoid)
    # read access is enough to comment
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)
    db_repo_name = repo.repo_name

    try:
        # resolve a possibly-abbreviated commit id to the full raw id
        commit = repo.scm_instance().get_commit(commit_id=commit_id)
        commit_id = commit.raw_id
    except Exception as e:
        log.exception('Failed to fetch commit')
        raise JSONRPCError(safe_str(e))

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)
    status = Optional.extract(status)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)
    extra_recipients = Optional.extract(extra_recipients)
    send_email = Optional.extract(send_email, binary=True)

    allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
    if status and status not in allowed_statuses:
        # fixed typo in the error message ("must be on of" -> "must be one of")
        raise JSONRPCError('Bad status, must be one '
                           'of %s got %s' % (allowed_statuses, status,))

    # resolving is only valid against an existing TODO comment
    if resolves_comment_id:
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this commit.'
                % resolves_comment_id)
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    try:
        rc_config = SettingsModel().get_all_settings()
        renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
        status_change_label = ChangesetStatus.get_status_lbl(status)
        comment = CommentsModel().create(
            message, repo, user, commit_id=commit_id,
            status_change=status_change_label,
            status_change_type=status,
            renderer=renderer,
            comment_type=comment_type,
            resolves_comment_id=resolves_comment_id,
            auth_user=apiuser,
            extra_recipients=extra_recipients,
            send_email=send_email
        )
        is_inline = comment.is_inline

        if status:
            # also do a status change
            try:
                ChangesetStatusModel().set_status(
                    repo, status, user, comment, revision=commit_id,
                    dont_allow_on_closed_pull_request=True
                )
            except StatusChangeOnClosedPullRequestError:
                log.exception(
                    "Exception occurred while trying to change repo commit status")
                msg = ('Changing status on a commit associated with '
                       'a closed pull request is not allowed')
                raise JSONRPCError(msg)

        CommentsModel().trigger_commit_comment_hook(
            repo, apiuser, 'create',
            data={'comment': comment, 'commit': commit})

        Session().commit()

        # push a live notification to anyone watching this repo/commit
        comment_broadcast_channel = channelstream.comment_channel(
            db_repo_name, commit_obj=commit)

        comment_data = {'comment': comment, 'comment_id': comment.comment_id}
        comment_type = 'inline' if is_inline else 'general'
        channelstream.comment_channelstream_push(
            request, comment_broadcast_channel, apiuser,
            _('posted a new {} comment').format(comment_type),
            comment_data=comment_data)

        return {
            'msg': (
                'Commented on commit `%s` for repository `%s`' % (
                    comment.revision, repo.repo_name)),
            'status_change': status,
            'success': True,
        }
    except JSONRPCError:
        # catch any inside errors, and re-raise them to prevent from
        # below global catch to silence them
        raise
    except Exception:
        log.exception("Exception occurred while trying to comment on commit")
        raise JSONRPCError(
            'failed to set comment on repository `%s`' % (repo.repo_name,)
        )
1705 1705
1706 1706
@jsonrpc_method()
def get_repo_comments(request, apiuser, repoid,
                      commit_id=Optional(None), comment_type=Optional(None),
                      userid=Optional(None)):
    """
    Get all comments for a repository, optionally filtered by commit,
    comment type and/or author.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Optionally filter the comments by the commit_id
    :type commit_id: Optional(str), default: None
    :param comment_type: Optionally filter the comments by the comment_type
        one of: 'note', 'todo'
    :type comment_type: Optional(str), default: None
    :param userid: Optionally filter the comments by the author of comment
    :type userid: Optional(str or int), Default: None

    Example error output:

    .. code-block:: bash

        {
          "id" : <id_given_in_input>,
          "result" : [
            {
              "comment_author": <USER_DETAILS>,
              "comment_created_on": "2017-02-01T14:38:16.309",
              "comment_f_path": "file.txt",
              "comment_id": 282,
              "comment_lineno": "n1",
              "comment_resolved_by": null,
              "comment_status": [],
              "comment_text": "This file needs a header",
              "comment_type": "todo",
              "comment_last_version": 0
            }
          ],
          "error" :  null
        }

    """
    repo = get_repo_or_error(repoid)
    # non super-admins need at least read access on the repository
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    commit_id = Optional.extract(commit_id)

    # resolve the optional author filter; an invalid userid raises here
    userid = Optional.extract(userid)
    if userid:
        user = get_user_or_error(userid)
    else:
        user = None

    # validate comment_type against the known types before querying
    comment_type = Optional.extract(comment_type)
    if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
        raise JSONRPCError(
            'comment_type must be one of `{}` got {}'.format(
                ChangesetComment.COMMENT_TYPES, comment_type)
        )

    comments = CommentsModel().get_repository_comments(
        repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
    return comments
1773 1773
1774 1774
@jsonrpc_method()
def get_comment(request, apiuser, comment_id):
    """
    Get single comment from repository or pull_request

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param comment_id: comment id found in the URL of comment
    :type comment_id: str or int

    Example error output:

    .. code-block:: bash

        {
          "id" : <id_given_in_input>,
          "result" : {
              "comment_author": <USER_DETAILS>,
              "comment_created_on": "2017-02-01T14:38:16.309",
              "comment_f_path": "file.txt",
              "comment_id": 282,
              "comment_lineno": "n1",
              "comment_resolved_by": null,
              "comment_status": [],
              "comment_text": "This file needs a header",
              "comment_type": "todo",
              "comment_last_version": 0
          },
          "error" :  null
        }

    """

    comment = ChangesetComment.get(comment_id)
    if not comment:
        raise JSONRPCError('comment `%s` does not exist' % (comment_id,))

    # require at least read access to the repo the comment belongs to
    perms = ('repository.read', 'repository.write', 'repository.admin')
    has_comment_perm = HasRepoPermissionAnyApi(*perms)\
        (user=apiuser, repo_name=comment.repo.repo_name)

    # deliberately report "does not exist" rather than "forbidden" so the
    # API does not leak which comment ids exist on repos the caller
    # cannot read
    if not has_comment_perm:
        raise JSONRPCError('comment `%s` does not exist' % (comment_id,))

    return comment
1820 1820
1821 1821
@jsonrpc_method()
def edit_comment(request, apiuser, message, comment_id, version,
                 userid=Optional(OAttr('apiuser'))):
    """
    Edit comment on the pull request or commit,
    specified by the `comment_id` and version. Initially version should be 0

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param comment_id: Specify the comment_id for editing
    :type comment_id: int
    :param version: version of the comment that will be created, starts from 0
    :type version: int
    :param message: The text content of the comment.
    :type message: str
    :param userid: Comment on the pull request as this user
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "comment": "<comment data>",
            "version": "<Integer>",
        },
        error :  null
    """

    auth_user = apiuser
    comment = ChangesetComment.get(comment_id)
    if not comment:
        raise JSONRPCError('comment `%s` does not exist' % (comment_id,))

    is_super_admin = has_superadmin_permission(apiuser)
    is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
        (user=apiuser, repo_name=comment.repo.repo_name)

    # acting as another user (`userid`) is only allowed for super-admins
    # and repo admins; everyone else may only act as themselves
    if not isinstance(userid, Optional):
        if is_super_admin or is_repo_admin:
            apiuser = get_user_or_error(userid)
            auth_user = apiuser.AuthUser()
        else:
            raise JSONRPCError('userid is not the same as your user')

    # admins may edit non-immutable comments of others; the author may
    # always edit their own comment
    comment_author = comment.author.user_id == auth_user.user_id
    if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
        raise JSONRPCError("you don't have access to edit this comment")

    try:
        # `version` implements optimistic locking: the edit is rejected if
        # the stored comment has already moved past the supplied version
        comment_history = CommentsModel().edit(
            comment_id=comment_id,
            text=message,
            auth_user=auth_user,
            version=version,
        )
        Session().commit()
    except CommentVersionMismatch:
        raise JSONRPCError(
            'comment ({}) version ({}) mismatch'.format(comment_id, version)
        )
    # edit() returns no history entry when nothing changed; an empty new
    # message is not a valid edit
    if not comment_history and not message:
        raise JSONRPCError(
            "comment ({}) can't be changed with empty string".format(comment_id)
        )

    # fire the matching integration hook depending on whether the comment
    # lives on a pull request or directly on a commit
    if comment.pull_request:
        pull_request = comment.pull_request
        PullRequestModel().trigger_pull_request_hook(
            pull_request, apiuser, 'comment_edit',
            data={'comment': comment})
    else:
        db_repo = comment.repo
        commit_id = comment.revision
        commit = db_repo.get_commit(commit_id)
        CommentsModel().trigger_commit_comment_hook(
            db_repo, apiuser, 'edit',
            data={'comment': comment, 'commit': commit})

    data = {
        'comment': comment,
        'version': comment_history.version if comment_history else None,
    }
    return data
1907 1907
1908 1908
1909 1909 # TODO(marcink): write this with all required logic for deleting a comments in PR or commits
1910 1910 # @jsonrpc_method()
1911 1911 # def delete_comment(request, apiuser, comment_id):
1912 1912 # auth_user = apiuser
1913 1913 #
1914 1914 # comment = ChangesetComment.get(comment_id)
1915 1915 # if not comment:
1916 1916 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1917 1917 #
1918 1918 # is_super_admin = has_superadmin_permission(apiuser)
1919 1919 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1920 1920 # (user=apiuser, repo_name=comment.repo.repo_name)
1921 1921 #
1922 1922 # comment_author = comment.author.user_id == auth_user.user_id
1923 1923 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
#         raise JSONRPCError("you don't have access to delete this comment")
1925 1925
@jsonrpc_method()
def grant_user_permission(request, apiuser, repoid, userid, perm):
    """
    Grant permissions for the specified user on the given repository,
    or update existing permissions if found.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name.
    :type userid: str
    :param perm: Set the user permissions, using the following format
        ``(repository.(none|read|write|admin))``
    :type perm: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
                  "success": true
                }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    perm = get_perm_or_error(perm)
    # only super-admins and repo admins may change repo permissions
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        changes = RepoModel().update_permissions(
            repo=repo,
            perm_additions=[[user.user_id, perm.permission_name, "user"]],
            cur_user=apiuser)

        # record what actually changed in the audit log
        audit_logger.store_api(
            'repo.edit.permissions',
            action_data={
                'added': changes['added'],
                'updated': changes['updated'],
                'deleted': changes['deleted'],
            },
            user=apiuser, repo=repo)
        Session().commit()
        # permission caches of affected users must be dropped immediately
        PermissionModel().flush_user_permission_caches(changes)

        return {
            'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % (
                perm.permission_name, user.username, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying edit permissions for repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )
1992 1992
1993 1993
@jsonrpc_method()
def revoke_user_permission(request, apiuser, repoid, userid):
    """
    Revoke permission for a user on the specified repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name of revoked user.
    :type userid: str or int

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
                  "success": true
                }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    # only super-admins and repo admins may change repo permissions
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    perm_deletions = [[user.user_id, None, "user"]]
    try:
        # NOTE: the acting user is `apiuser` (the API caller), not the user
        # whose permission is being revoked -- consistent with
        # grant_user_permission and the audit entry below. Previously
        # `cur_user=user` attributed the change to the revoked user.
        changes = RepoModel().update_permissions(
            repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)

        # record what actually changed in the audit log
        action_data = {
            'added': changes['added'],
            'updated': changes['updated'],
            'deleted': changes['deleted'],
        }
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        # permission caches of affected users must be dropped immediately
        PermissionModel().flush_user_permission_caches(changes)

        return {
            'msg': 'Revoked perm for user: `%s` in repo: `%s`' % (
                user.username, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying revoke permissions to repo")
        raise JSONRPCError(
            'failed to edit permission for user: `%s` in repo: `%s`' % (
                userid, repoid
            )
        )
2055 2055
2056 2056
@jsonrpc_method()
def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
    """
    Grant permission for a user group on the specified repository,
    or update existing permissions.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the ID of the user group.
    :type usergroupid: str or int
    :param perm: Set the user group permissions using the following
        format: (repository.(none|read|write|admin))
    :type perm: str

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
                  "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
                  "success": true

                }
        error :  null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
          }

    """

    repo = get_repo_or_error(repoid)
    perm = get_perm_or_error(perm)
    # only super-admins and repo admins may change repo permissions
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    user_group = get_user_group_or_error(usergroupid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for this user group !
        # non-readers get "does not exist" so group ids are not leaked
        _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        if not HasUserGroupPermissionAnyApi(*_perms)(
                user=apiuser, user_group_name=user_group.users_group_name):
            raise JSONRPCError(
                'user group `%s` does not exist' % (usergroupid,))

    perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_additions=perm_additions, cur_user=apiuser)
        # record what actually changed in the audit log
        action_data = {
            'added': changes['added'],
            'updated': changes['updated'],
            'deleted': changes['deleted'],
        }
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        # permission caches of affected users must be dropped immediately
        PermissionModel().flush_user_permission_caches(changes)

        return {
            'msg': 'Granted perm: `%s` for user group: `%s` in '
                   'repo: `%s`' % (
                       perm.permission_name, user_group.users_group_name,
                       repo.repo_name
                   ),
            'success': True
        }
    except Exception:
        log.exception(
            "Exception occurred while trying change permission on repo")
        raise JSONRPCError(
            'failed to edit permission for user group: `%s` in '
            'repo: `%s`' % (
                usergroupid, repo.repo_name
            )
        )
2146 2146
2147 2147
@jsonrpc_method()
def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
    """
    Revoke the permissions of a user group on a given repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the user group ID.
    :type usergroupid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
                  "success": true
                }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    # only super-admins and repo admins may change repo permissions
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    user_group = get_user_group_or_error(usergroupid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for this user group !
        # non-readers get "does not exist" so group ids are not leaked
        _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        if not HasUserGroupPermissionAnyApi(*_perms)(
                user=apiuser, user_group_name=user_group.users_group_name):
            raise JSONRPCError(
                'user group `%s` does not exist' % (usergroupid,))

    perm_deletions = [[user_group.users_group_id, None, "user_group"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
        # record what actually changed in the audit log
        action_data = {
            'added': changes['added'],
            'updated': changes['updated'],
            'deleted': changes['deleted'],
        }
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        # permission caches of affected users must be dropped immediately
        PermissionModel().flush_user_permission_caches(changes)

        return {
            'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % (
                user_group.users_group_name, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying revoke "
                      "user group permission on repo")
        raise JSONRPCError(
            'failed to edit permission for user group: `%s` in '
            'repo: `%s`' % (
                user_group.users_group_name, repo.repo_name
            )
        )
2218 2218
2219 2219
@jsonrpc_method()
def pull(request, apiuser, repoid, remote_uri=Optional(None)):
    """
    Triggers a pull on the given repository from a remote location. You
    can use this to keep remote repositories up-to-date.

    This command can only be run using an |authtoken| with admin
    rights to the specified repository. For more information,
    see :ref:`config-token-ref`.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param remote_uri: Optional remote URI to pass in for pull
    :type remote_uri: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
         "Unable to push changes from `<remote_url>`"
      }

    """

    repo = get_repo_or_error(repoid)
    remote_uri = Optional.extract(remote_uri)
    # when no explicit remote is given, fall back to the repo's stored
    # (credential-hidden) clone URI for display purposes
    remote_uri_display = remote_uri or repo.clone_uri_hidden
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        ScmModel().pull_changes(
            repo.repo_name, apiuser.username, remote_uri=remote_uri)
    except Exception:
        log.exception("Exception occurred while trying to "
                      "pull changes from remote location")
        raise JSONRPCError(
            'Unable to pull changes from `%s`' % remote_uri_display
        )

    return {
        'msg': 'Pulled from url `%s` on repo `%s`' % (
            remote_uri_display, repo.repo_name),
        'repository': repo.repo_name
    }
2283 2283
2284 2284
@jsonrpc_method()
def strip(request, apiuser, repoid, revision, branch):
    """
    Strips the given revision from the specified repository.

    * This will remove the revision and all of its descendants.
      This operation is destructive and cannot be undone via the API.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision you wish to strip.
    :type revision: str
    :param branch: The branch from which to strip the revision.
    :type branch: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
         "Unable to strip commit <commit_hash> from repo `<repository name>`"
      }

    """

    repo = get_repo_or_error(repoid)
    # destructive operation: restricted to super-admins and repo admins
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        ScmModel().strip(repo, revision, branch)
        # commit=True writes the audit entry in the same transaction
        audit_logger.store_api(
            'repo.commit.strip', action_data={'commit_id': revision},
            repo=repo,
            user=apiuser, commit=True)

        return {
            'msg': 'Stripped commit %s from repo `%s`' % (
                revision, repo.repo_name),
            'repository': repo.repo_name
        }
    except Exception:
        log.exception("Exception while trying to strip")
        raise JSONRPCError(
            'Unable to strip commit %s from repo `%s`' % (
                revision, repo.repo_name)
        )
2352 2352
2353 2353
@jsonrpc_method()
def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
    """
    Returns all settings for a repository. If key is given it only returns the
    setting identified by the key or null.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param key: Key of the setting to return.
    :type: key: Optional(str)

    Example output:

    .. code-block:: bash

        {
            "error": null,
            "id": 237,
            "result": {
                "extensions_largefiles": true,
                "extensions_evolve": true,
                "hooks_changegroup_push_logger": true,
                "hooks_changegroup_repo_size": false,
                "hooks_outgoing_pull_logger": true,
                "phases_publish": "True",
                "rhodecode_hg_use_rebase_for_merging": true,
                "rhodecode_pr_merge_enabled": true,
                "rhodecode_use_outdated_comments": true
            }
        }
    """

    # Restrict access to this api method to super-admins, and repo admins only.
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        # repo-level settings override the global defaults
        settings_model = VcsSettingsModel(repo=repo)
        settings = settings_model.get_global_settings()
        settings.update(settings_model.get_repo_settings())

        # If only a single setting is requested fetch it from all settings.
        # An unknown key yields None (serialized as null), not an error.
        key = Optional.extract(key)
        if key is not None:
            settings = settings.get(key, None)
    except Exception:
        msg = 'Failed to fetch settings for repository `{}`'.format(repoid)
        log.exception(msg)
        raise JSONRPCError(msg)

    return settings
2409 2409
2410 2410
@jsonrpc_method()
def set_repo_settings(request, apiuser, repoid, settings):
    """
    Update repository settings. Returns true on success.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param settings: The new settings for the repository.
    :type: settings: dict

    Example output:

    .. code-block:: bash

        {
            "error": null,
            "id": 237,
            "result": true
        }
    """
    # Restrict access to this api method to super-admins, and repo admins only.
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # idiomatic type check (was `type(settings) is not dict`); dict
    # subclasses from JSON deserialization are accepted as well
    if not isinstance(settings, dict):
        raise JSONRPCError('Settings have to be a JSON Object.')

    try:
        # pass the resolved repo object, consistent with get_repo_settings
        settings_model = VcsSettingsModel(repo=repo)

        # Merge global, repo and incoming settings.
        new_settings = settings_model.get_global_settings()
        new_settings.update(settings_model.get_repo_settings())
        new_settings.update(settings)

        # Update the settings.
        inherit_global_settings = new_settings.get(
            'inherit_global_settings', False)
        settings_model.create_or_update_repo_settings(
            new_settings, inherit_global_settings=inherit_global_settings)
        Session().commit()
    except Exception:
        msg = 'Failed to update settings for repository `{}`'.format(repoid)
        log.exception(msg)
        raise JSONRPCError(msg)

    # Indicate success.
    return True
2463 2463
2464 2464
@jsonrpc_method()
def maintenance(request, apiuser, repoid):
    """
    Triggers a maintenance on the given repository.

    This command can only be run using an |authtoken| with admin
    rights to the specified repository. For more information,
    see :ref:`config-token-ref`.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "executed maintenance command",
        "executed_actions": [
           <action_message>, <action_message2>...
        ],
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
         "Unable to execute maintenance on `<reponame>`"
      }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        # run all configured maintenance tasks; each returns a message
        executed_actions = repo_maintenance.RepoMaintenance().execute(repo)

        return {
            'msg': 'executed maintenance command',
            'executed_actions': executed_actions,
            'repository': repo.repo_name
        }
    except Exception:
        log.exception("Exception occurred while trying to run maintenance")
        raise JSONRPCError(
            'Unable to execute maintenance on `%s`' % repo.repo_name)
@@ -1,269 +1,270 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import time
23 23 import errno
24 24 import hashlib
25 25
26 26 from rhodecode.lib.ext_json import json
27 27 from rhodecode.apps.file_store import utils
28 28 from rhodecode.apps.file_store.extensions import resolve_extensions
29 29 from rhodecode.apps.file_store.exceptions import (
30 30 FileNotAllowedException, FileOverSizeException)
31 31
32 32 METADATA_VER = 'v1'
33 33
34 34
def safe_make_dirs(dir_path):
    """
    Create ``dir_path`` (including intermediate directories) if it does not
    already exist. A concurrent creation of the same path by another
    process is tolerated; any other ``OSError`` is re-raised.

    :param dir_path: absolute directory path to create
    """
    if os.path.exists(dir_path):
        return
    try:
        os.makedirs(dir_path)
    except OSError as e:
        # another writer may have created the path between the existence
        # check and makedirs(); only that race is acceptable
        if e.errno != errno.EEXIST:
            raise
43 43
44 44
class LocalFileStorage(object):
    """
    Filesystem-backed artifact store. Files are written below ``base_path``
    under a two-character "sub store" shard directory derived from the
    file name, together with a ``<name>.meta`` JSON side-car file.
    """

    @classmethod
    def apply_counter(cls, counter, filename):
        """
        Prefixes a filename with a numeric counter,
        e.g. (1, 'test.jpg') -> '1-test.jpg'.

        :param counter: integer counter value
        :param filename: base name of file
        """
        name_counted = '%d-%s' % (counter, filename)
        return name_counted

    @classmethod
    def resolve_name(cls, name, directory):
        """
        Resolves a unique name and the correct path. If a filename
        for that path already exists then a numeric prefix with values > 0 will be
        added, for example test.jpg -> 1-test.jpg etc. initially file would have 0 prefix.

        :param name: base name of file
        :param directory: absolute directory path
        :return: tuple of (counted_name, absolute_path)
        """

        counter = 0
        while True:
            name_counted = cls.apply_counter(counter, name)

            # sub_store prefix to optimize disk usage, e.g some_path/ab/final_file
            sub_store = cls._sub_store_from_filename(name_counted)
            sub_store_path = os.path.join(directory, sub_store)
            safe_make_dirs(sub_store_path)

            path = os.path.join(sub_store_path, name_counted)
            if not os.path.exists(path):
                return name_counted, path
            counter += 1

    @classmethod
    def _sub_store_from_filename(cls, filename):
        # first two characters of the (counted) filename form the shard dir
        return filename[:2]

    @classmethod
    def calculate_path_hash(cls, file_path):
        """
        Efficient calculation of file_path sha256 sum

        :param file_path:
        :return: sha256sum
        """
        digest = hashlib.sha256()
        with open(file_path, 'rb') as f:
            # read in 100KB chunks so large artifacts don't load into memory
            for chunk in iter(lambda: f.read(1024 * 100), b""):
                digest.update(chunk)

        return digest.hexdigest()

    def __init__(self, base_path, extension_groups=None):

        """
        Local file storage

        :param base_path: the absolute base path where uploads are stored
        :param extension_groups: extensions string
        """

        extension_groups = extension_groups or ['any']
        self.base_path = base_path
        self.extensions = resolve_extensions([], groups=extension_groups)

    def __repr__(self):
        return '{}@{}'.format(self.__class__, self.base_path)

    def store_path(self, filename):
        """
        Returns absolute file path of the filename, joined to the
        base_path.

        :param filename: base name of file, optionally prefixed with a
            relative directory, e.g 'subdir/0-uid.jpg'
        """
        prefix_dir = ''
        if '/' in filename:
            # NOTE: rsplit keeps any nested prefix directory intact; the
            # previous plain split('/') raised ValueError for names with
            # more than one separator
            prefix_dir, filename = filename.rsplit('/', 1)
        sub_store = self._sub_store_from_filename(filename)
        return os.path.join(self.base_path, prefix_dir, sub_store, filename)

    def delete(self, filename):
        """
        Deletes the filename. Filename is resolved with the
        absolute path based on base_path. If file does not exist,
        returns **False**, otherwise **True**

        :param filename: base name of file
        """
        if self.exists(filename):
            os.remove(self.store_path(filename))
            return True
        return False

    def exists(self, filename):
        """
        Checks if file exists. Resolves filename's absolute
        path based on base_path.

        :param filename: file_uid name of file, e.g 0-f62b2b2d-9708-4079-a071-ec3f958448d4.svg
        """
        return os.path.exists(self.store_path(filename))

    def filename_allowed(self, filename, extensions=None):
        """Checks if a filename has an allowed extension

        :param filename: base name of file
        :param extensions: iterable of extensions (or self.extensions)
        """
        _, ext = os.path.splitext(filename)
        return self.extension_allowed(ext, extensions)

    def extension_allowed(self, ext, extensions=None):
        """
        Checks if an extension is permitted. Both e.g. ".jpg" and
        "jpg" can be passed in. Extension lookup is case-insensitive.

        :param ext: extension to check
        :param extensions: iterable of extensions to validate against (or self.extensions)
        """
        def normalize_ext(_ext):
            if _ext.startswith('.'):
                _ext = _ext[1:]
            return _ext.lower()

        extensions = extensions or self.extensions
        if not extensions:
            # empty extension set means "allow everything"
            return True

        ext = normalize_ext(ext)

        return ext in [normalize_ext(x) for x in extensions]

    def save_file(self, file_obj, filename, directory=None, extensions=None,
                  extra_metadata=None, max_filesize=None, randomized_name=True, **kwargs):
        """
        Saves a file object to the uploads location.
        Returns the resolved filename, i.e. the directory +
        the (randomized/incremented) base name.

        :param file_obj: **cgi.FieldStorage** object (or similar)
        :param filename: original filename
        :param directory: relative path of sub-directory
        :param extensions: iterable of allowed extensions, if not default
        :param max_filesize: maximum size of file that should be allowed
        :param randomized_name: generate random generated UID or fixed based on the filename
        :param extra_metadata: extra JSON metadata to store next to the file with .meta suffix

        """

        extensions = extensions or self.extensions

        if not self.filename_allowed(filename, extensions):
            raise FileNotAllowedException()

        if directory:
            dest_directory = os.path.join(self.base_path, directory)
        else:
            dest_directory = self.base_path

        safe_make_dirs(dest_directory)

        uid_filename = utils.uid_filename(filename, randomized=randomized_name)

        # resolve also produces special sub-dir for file optimized store
        filename, path = self.resolve_name(uid_filename, dest_directory)
        stored_file_dir = os.path.dirname(path)

        no_body_seek = kwargs.pop('no_body_seek', False)
        if not no_body_seek:
            # rewind so we copy the full body even if it was read before
            file_obj.seek(0)

        with open(path, "wb") as dest:
            # stream-copy in 256KB chunks to keep memory usage bounded
            length = 256 * 1024
            while True:
                buf = file_obj.read(length)
                if not buf:
                    break
                dest.write(buf)

        # copy extra_metadata so the later .update() never mutates the
        # caller-owned dict
        metadata = dict(extra_metadata) if extra_metadata else {}

        size = os.stat(path).st_size

        if max_filesize and size > max_filesize:
            # free up the copied file, and raise exc
            os.remove(path)
            raise FileOverSizeException()

        file_hash = self.calculate_path_hash(path)

        metadata.update({
            "filename": filename,
            "size": size,
            "time": time.time(),
            "sha256": file_hash,
            "meta_ver": METADATA_VER
        })

        filename_meta = filename + '.meta'
        with open(os.path.join(stored_file_dir, filename_meta), "wb") as dest_meta:
            dest_meta.write(json.dumps(metadata))

        if directory:
            filename = os.path.join(directory, filename)

        return filename, metadata

    def get_metadata(self, filename, ignore_missing=False):
        """
        Reads JSON stored metadata for a file

        :param filename: file_uid name of file
        :param ignore_missing: when True, return {} instead of raising
            IOError if the .meta side-car file does not exist
        :return: metadata dict
        """
        filename = self.store_path(filename)
        filename_meta = filename + '.meta'
        if ignore_missing and not os.path.isfile(filename_meta):
            return {}
        with open(filename_meta, "rb") as source_meta:
            return json.loads(source_meta.read())
@@ -1,1227 +1,1227 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 from rhodecode.apps._base import add_route_with_slash
21 21
22 22
23 23 def includeme(config):
24 24 from rhodecode.apps.repository.views.repo_artifacts import RepoArtifactsView
25 25 from rhodecode.apps.repository.views.repo_audit_logs import AuditLogsView
26 26 from rhodecode.apps.repository.views.repo_automation import RepoAutomationView
27 27 from rhodecode.apps.repository.views.repo_bookmarks import RepoBookmarksView
28 28 from rhodecode.apps.repository.views.repo_branch_permissions import RepoSettingsBranchPermissionsView
29 29 from rhodecode.apps.repository.views.repo_branches import RepoBranchesView
30 30 from rhodecode.apps.repository.views.repo_caches import RepoCachesView
31 31 from rhodecode.apps.repository.views.repo_changelog import RepoChangelogView
32 32 from rhodecode.apps.repository.views.repo_checks import RepoChecksView
33 33 from rhodecode.apps.repository.views.repo_commits import RepoCommitsView
34 34 from rhodecode.apps.repository.views.repo_compare import RepoCompareView
35 35 from rhodecode.apps.repository.views.repo_feed import RepoFeedView
36 36 from rhodecode.apps.repository.views.repo_files import RepoFilesView
37 37 from rhodecode.apps.repository.views.repo_forks import RepoForksView
38 38 from rhodecode.apps.repository.views.repo_maintainance import RepoMaintenanceView
39 39 from rhodecode.apps.repository.views.repo_permissions import RepoSettingsPermissionsView
40 40 from rhodecode.apps.repository.views.repo_pull_requests import RepoPullRequestsView
41 41 from rhodecode.apps.repository.views.repo_review_rules import RepoReviewRulesView
42 42 from rhodecode.apps.repository.views.repo_settings import RepoSettingsView
43 43 from rhodecode.apps.repository.views.repo_settings_advanced import RepoSettingsAdvancedView
44 44 from rhodecode.apps.repository.views.repo_settings_fields import RepoSettingsFieldsView
45 45 from rhodecode.apps.repository.views.repo_settings_issue_trackers import RepoSettingsIssueTrackersView
46 46 from rhodecode.apps.repository.views.repo_settings_remote import RepoSettingsRemoteView
47 47 from rhodecode.apps.repository.views.repo_settings_vcs import RepoSettingsVcsView
48 48 from rhodecode.apps.repository.views.repo_strip import RepoStripView
49 49 from rhodecode.apps.repository.views.repo_summary import RepoSummaryView
50 50 from rhodecode.apps.repository.views.repo_tags import RepoTagsView
51 51
52 52 # repo creating checks, special cases that aren't repo routes
53 53 config.add_route(
54 54 name='repo_creating',
55 55 pattern='/{repo_name:.*?[^/]}/repo_creating')
56 56 config.add_view(
57 57 RepoChecksView,
58 58 attr='repo_creating',
59 59 route_name='repo_creating', request_method='GET',
60 60 renderer='rhodecode:templates/admin/repos/repo_creating.mako')
61 61
62 62 config.add_route(
63 63 name='repo_creating_check',
64 64 pattern='/{repo_name:.*?[^/]}/repo_creating_check')
65 65 config.add_view(
66 66 RepoChecksView,
67 67 attr='repo_creating_check',
68 68 route_name='repo_creating_check', request_method='GET',
69 69 renderer='json_ext')
70 70
71 71 # Summary
72 72 # NOTE(marcink): one additional route is defined in very bottom, catch
73 73 # all pattern
74 74 config.add_route(
75 75 name='repo_summary_explicit',
76 76 pattern='/{repo_name:.*?[^/]}/summary', repo_route=True)
77 77 config.add_view(
78 78 RepoSummaryView,
79 79 attr='summary',
80 80 route_name='repo_summary_explicit', request_method='GET',
81 81 renderer='rhodecode:templates/summary/summary.mako')
82 82
83 83 config.add_route(
84 84 name='repo_summary_commits',
85 85 pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True)
86 86 config.add_view(
87 87 RepoSummaryView,
88 88 attr='summary_commits',
89 89 route_name='repo_summary_commits', request_method='GET',
90 90 renderer='rhodecode:templates/summary/summary_commits.mako')
91 91
92 92 # Commits
93 93 config.add_route(
94 94 name='repo_commit',
95 95 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True)
96 96 config.add_view(
97 97 RepoCommitsView,
98 98 attr='repo_commit_show',
99 99 route_name='repo_commit', request_method='GET',
100 100 renderer=None)
101 101
102 102 config.add_route(
103 103 name='repo_commit_children',
104 104 pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True)
105 105 config.add_view(
106 106 RepoCommitsView,
107 107 attr='repo_commit_children',
108 108 route_name='repo_commit_children', request_method='GET',
109 109 renderer='json_ext', xhr=True)
110 110
111 111 config.add_route(
112 112 name='repo_commit_parents',
113 113 pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True)
114 114 config.add_view(
115 115 RepoCommitsView,
116 116 attr='repo_commit_parents',
117 117 route_name='repo_commit_parents', request_method='GET',
118 118 renderer='json_ext')
119 119
120 120 config.add_route(
121 121 name='repo_commit_raw',
122 122 pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True)
123 123 config.add_view(
124 124 RepoCommitsView,
125 125 attr='repo_commit_raw',
126 126 route_name='repo_commit_raw', request_method='GET',
127 127 renderer=None)
128 128
129 129 config.add_route(
130 130 name='repo_commit_patch',
131 131 pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True)
132 132 config.add_view(
133 133 RepoCommitsView,
134 134 attr='repo_commit_patch',
135 135 route_name='repo_commit_patch', request_method='GET',
136 136 renderer=None)
137 137
138 138 config.add_route(
139 139 name='repo_commit_download',
140 140 pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True)
141 141 config.add_view(
142 142 RepoCommitsView,
143 143 attr='repo_commit_download',
144 144 route_name='repo_commit_download', request_method='GET',
145 145 renderer=None)
146 146
147 147 config.add_route(
148 148 name='repo_commit_data',
149 149 pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True)
150 150 config.add_view(
151 151 RepoCommitsView,
152 152 attr='repo_commit_data',
153 153 route_name='repo_commit_data', request_method='GET',
154 154 renderer='json_ext', xhr=True)
155 155
156 156 config.add_route(
157 157 name='repo_commit_comment_create',
158 158 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True)
159 159 config.add_view(
160 160 RepoCommitsView,
161 161 attr='repo_commit_comment_create',
162 162 route_name='repo_commit_comment_create', request_method='POST',
163 163 renderer='json_ext')
164 164
165 165 config.add_route(
166 166 name='repo_commit_comment_preview',
167 167 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True)
168 168 config.add_view(
169 169 RepoCommitsView,
170 170 attr='repo_commit_comment_preview',
171 171 route_name='repo_commit_comment_preview', request_method='POST',
172 172 renderer='string', xhr=True)
173 173
174 174 config.add_route(
175 175 name='repo_commit_comment_history_view',
176 176 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_history_id}/history_view', repo_route=True)
177 177 config.add_view(
178 178 RepoCommitsView,
179 179 attr='repo_commit_comment_history_view',
180 180 route_name='repo_commit_comment_history_view', request_method='POST',
181 181 renderer='string', xhr=True)
182 182
183 183 config.add_route(
184 184 name='repo_commit_comment_attachment_upload',
185 185 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/attachment_upload', repo_route=True)
186 186 config.add_view(
187 187 RepoCommitsView,
188 188 attr='repo_commit_comment_attachment_upload',
189 189 route_name='repo_commit_comment_attachment_upload', request_method='POST',
190 190 renderer='json_ext', xhr=True)
191 191
192 192 config.add_route(
193 193 name='repo_commit_comment_delete',
194 194 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True)
195 195 config.add_view(
196 196 RepoCommitsView,
197 197 attr='repo_commit_comment_delete',
198 198 route_name='repo_commit_comment_delete', request_method='POST',
199 199 renderer='json_ext')
200 200
201 201 config.add_route(
202 202 name='repo_commit_comment_edit',
203 203 pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/edit', repo_route=True)
204 204 config.add_view(
205 205 RepoCommitsView,
206 206 attr='repo_commit_comment_edit',
207 207 route_name='repo_commit_comment_edit', request_method='POST',
208 208 renderer='json_ext')
209 209
210 210 # still working url for backward compat.
211 211 config.add_route(
212 212 name='repo_commit_raw_deprecated',
213 213 pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True)
214 214 config.add_view(
215 215 RepoCommitsView,
216 216 attr='repo_commit_raw',
217 217 route_name='repo_commit_raw_deprecated', request_method='GET',
218 218 renderer=None)
219 219
220 220 # Files
221 221 config.add_route(
222 222 name='repo_archivefile',
223 223 pattern='/{repo_name:.*?[^/]}/archive/{fname:.*}', repo_route=True)
224 224 config.add_view(
225 225 RepoFilesView,
226 226 attr='repo_archivefile',
227 227 route_name='repo_archivefile', request_method='GET',
228 228 renderer=None)
229 229
230 230 config.add_route(
231 231 name='repo_files_diff',
232 232 pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True)
233 233 config.add_view(
234 234 RepoFilesView,
235 235 attr='repo_files_diff',
236 236 route_name='repo_files_diff', request_method='GET',
237 237 renderer=None)
238 238
239 239 config.add_route( # legacy route to make old links work
240 240 name='repo_files_diff_2way_redirect',
241 241 pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True)
242 242 config.add_view(
243 243 RepoFilesView,
244 244 attr='repo_files_diff_2way_redirect',
245 245 route_name='repo_files_diff_2way_redirect', request_method='GET',
246 246 renderer=None)
247 247
248 248 config.add_route(
249 249 name='repo_files',
250 250 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True)
251 251 config.add_view(
252 252 RepoFilesView,
253 253 attr='repo_files',
254 254 route_name='repo_files', request_method='GET',
255 255 renderer=None)
256 256
257 257 config.add_route(
258 258 name='repo_files:default_path',
259 259 pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True)
260 260 config.add_view(
261 261 RepoFilesView,
262 262 attr='repo_files',
263 263 route_name='repo_files:default_path', request_method='GET',
264 264 renderer=None)
265 265
266 266 config.add_route(
267 267 name='repo_files:default_commit',
268 268 pattern='/{repo_name:.*?[^/]}/files', repo_route=True)
269 269 config.add_view(
270 270 RepoFilesView,
271 271 attr='repo_files',
272 272 route_name='repo_files:default_commit', request_method='GET',
273 273 renderer=None)
274 274
275 275 config.add_route(
276 276 name='repo_files:rendered',
277 277 pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True)
278 278 config.add_view(
279 279 RepoFilesView,
280 280 attr='repo_files',
281 281 route_name='repo_files:rendered', request_method='GET',
282 282 renderer=None)
283 283
284 284 config.add_route(
285 285 name='repo_files:annotated',
286 286 pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True)
287 287 config.add_view(
288 288 RepoFilesView,
289 289 attr='repo_files',
290 290 route_name='repo_files:annotated', request_method='GET',
291 291 renderer=None)
292 292
293 293 config.add_route(
294 294 name='repo_files:annotated_previous',
295 295 pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True)
296 296 config.add_view(
297 297 RepoFilesView,
298 298 attr='repo_files_annotated_previous',
299 299 route_name='repo_files:annotated_previous', request_method='GET',
300 300 renderer=None)
301 301
302 302 config.add_route(
303 303 name='repo_nodetree_full',
304 304 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True)
305 305 config.add_view(
306 306 RepoFilesView,
307 307 attr='repo_nodetree_full',
308 308 route_name='repo_nodetree_full', request_method='GET',
309 309 renderer=None, xhr=True)
310 310
311 311 config.add_route(
312 312 name='repo_nodetree_full:default_path',
313 313 pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True)
314 314 config.add_view(
315 315 RepoFilesView,
316 316 attr='repo_nodetree_full',
317 317 route_name='repo_nodetree_full:default_path', request_method='GET',
318 318 renderer=None, xhr=True)
319 319
320 320 config.add_route(
321 321 name='repo_files_nodelist',
322 322 pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True)
323 323 config.add_view(
324 324 RepoFilesView,
325 325 attr='repo_nodelist',
326 326 route_name='repo_files_nodelist', request_method='GET',
327 327 renderer='json_ext', xhr=True)
328 328
329 329 config.add_route(
330 330 name='repo_file_raw',
331 331 pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True)
332 332 config.add_view(
333 333 RepoFilesView,
334 334 attr='repo_file_raw',
335 335 route_name='repo_file_raw', request_method='GET',
336 336 renderer=None)
337 337
338 338 config.add_route(
339 339 name='repo_file_download',
340 340 pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True)
341 341 config.add_view(
342 342 RepoFilesView,
343 343 attr='repo_file_download',
344 344 route_name='repo_file_download', request_method='GET',
345 345 renderer=None)
346 346
347 347 config.add_route( # backward compat to keep old links working
348 348 name='repo_file_download:legacy',
349 349 pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}',
350 350 repo_route=True)
351 351 config.add_view(
352 352 RepoFilesView,
353 353 attr='repo_file_download',
354 354 route_name='repo_file_download:legacy', request_method='GET',
355 355 renderer=None)
356 356
357 357 config.add_route(
358 358 name='repo_file_history',
359 359 pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True)
360 360 config.add_view(
361 361 RepoFilesView,
362 362 attr='repo_file_history',
363 363 route_name='repo_file_history', request_method='GET',
364 364 renderer='json_ext')
365 365
366 366 config.add_route(
367 367 name='repo_file_authors',
368 368 pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True)
369 369 config.add_view(
370 370 RepoFilesView,
371 371 attr='repo_file_authors',
372 372 route_name='repo_file_authors', request_method='GET',
373 373 renderer='rhodecode:templates/files/file_authors_box.mako')
374 374
375 375 config.add_route(
376 376 name='repo_files_check_head',
377 377 pattern='/{repo_name:.*?[^/]}/check_head/{commit_id}/{f_path:.*}',
378 378 repo_route=True)
379 379 config.add_view(
380 380 RepoFilesView,
381 381 attr='repo_files_check_head',
382 382 route_name='repo_files_check_head', request_method='POST',
383 383 renderer='json_ext', xhr=True)
384 384
385 385 config.add_route(
386 386 name='repo_files_remove_file',
387 387 pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}',
388 388 repo_route=True)
389 389 config.add_view(
390 390 RepoFilesView,
391 391 attr='repo_files_remove_file',
392 392 route_name='repo_files_remove_file', request_method='GET',
393 393 renderer='rhodecode:templates/files/files_delete.mako')
394 394
395 395 config.add_route(
396 396 name='repo_files_delete_file',
397 397 pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}',
398 398 repo_route=True)
399 399 config.add_view(
400 400 RepoFilesView,
401 401 attr='repo_files_delete_file',
402 402 route_name='repo_files_delete_file', request_method='POST',
403 403 renderer=None)
404 404
405 405 config.add_route(
406 406 name='repo_files_edit_file',
407 407 pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}',
408 408 repo_route=True)
409 409 config.add_view(
410 410 RepoFilesView,
411 411 attr='repo_files_edit_file',
412 412 route_name='repo_files_edit_file', request_method='GET',
413 413 renderer='rhodecode:templates/files/files_edit.mako')
414 414
415 415 config.add_route(
416 416 name='repo_files_update_file',
417 417 pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}',
418 418 repo_route=True)
419 419 config.add_view(
420 420 RepoFilesView,
421 421 attr='repo_files_update_file',
422 422 route_name='repo_files_update_file', request_method='POST',
423 423 renderer=None)
424 424
425 425 config.add_route(
426 426 name='repo_files_add_file',
427 427 pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}',
428 428 repo_route=True)
429 429 config.add_view(
430 430 RepoFilesView,
431 431 attr='repo_files_add_file',
432 432 route_name='repo_files_add_file', request_method='GET',
433 433 renderer='rhodecode:templates/files/files_add.mako')
434 434
435 435 config.add_route(
436 436 name='repo_files_upload_file',
437 437 pattern='/{repo_name:.*?[^/]}/upload_file/{commit_id}/{f_path:.*}',
438 438 repo_route=True)
439 439 config.add_view(
440 440 RepoFilesView,
441 441 attr='repo_files_add_file',
442 442 route_name='repo_files_upload_file', request_method='GET',
443 443 renderer='rhodecode:templates/files/files_upload.mako')
444 444 config.add_view( # POST creates
445 445 RepoFilesView,
446 446 attr='repo_files_upload_file',
447 447 route_name='repo_files_upload_file', request_method='POST',
448 448 renderer='json_ext')
449 449
450 450 config.add_route(
451 451 name='repo_files_create_file',
452 452 pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}',
453 453 repo_route=True)
454 454 config.add_view( # POST creates
455 455 RepoFilesView,
456 456 attr='repo_files_create_file',
457 457 route_name='repo_files_create_file', request_method='POST',
458 458 renderer=None)
459 459
460 460 # Refs data
461 461 config.add_route(
462 462 name='repo_refs_data',
463 463 pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True)
464 464 config.add_view(
465 465 RepoSummaryView,
466 466 attr='repo_refs_data',
467 467 route_name='repo_refs_data', request_method='GET',
468 468 renderer='json_ext')
469 469
470 470 config.add_route(
471 471 name='repo_refs_changelog_data',
472 472 pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True)
473 473 config.add_view(
474 474 RepoSummaryView,
475 475 attr='repo_refs_changelog_data',
476 476 route_name='repo_refs_changelog_data', request_method='GET',
477 477 renderer='json_ext')
478 478
479 479 config.add_route(
480 480 name='repo_stats',
481 481 pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True)
482 482 config.add_view(
483 483 RepoSummaryView,
484 484 attr='repo_stats',
485 485 route_name='repo_stats', request_method='GET',
486 486 renderer='json_ext')
487 487
488 488 # Commits
489 489 config.add_route(
490 490 name='repo_commits',
491 491 pattern='/{repo_name:.*?[^/]}/commits', repo_route=True)
492 492 config.add_view(
493 493 RepoChangelogView,
494 494 attr='repo_changelog',
495 495 route_name='repo_commits', request_method='GET',
496 496 renderer='rhodecode:templates/commits/changelog.mako')
497 497 # old routes for backward compat
498 498 config.add_view(
499 499 RepoChangelogView,
500 500 attr='repo_changelog',
501 501 route_name='repo_changelog', request_method='GET',
502 502 renderer='rhodecode:templates/commits/changelog.mako')
503 503
504 504 config.add_route(
505 505 name='repo_commits_elements',
506 506 pattern='/{repo_name:.*?[^/]}/commits_elements', repo_route=True)
507 507 config.add_view(
508 508 RepoChangelogView,
509 509 attr='repo_commits_elements',
510 510 route_name='repo_commits_elements', request_method=('GET', 'POST'),
511 511 renderer='rhodecode:templates/commits/changelog_elements.mako',
512 512 xhr=True)
513 513
514 514 config.add_route(
515 515 name='repo_commits_elements_file',
516 516 pattern='/{repo_name:.*?[^/]}/commits_elements/{commit_id}/{f_path:.*}', repo_route=True)
517 517 config.add_view(
518 518 RepoChangelogView,
519 519 attr='repo_commits_elements',
520 520 route_name='repo_commits_elements_file', request_method=('GET', 'POST'),
521 521 renderer='rhodecode:templates/commits/changelog_elements.mako',
522 522 xhr=True)
523 523
524 524 config.add_route(
525 525 name='repo_commits_file',
526 526 pattern='/{repo_name:.*?[^/]}/commits/{commit_id}/{f_path:.*}', repo_route=True)
527 527 config.add_view(
528 528 RepoChangelogView,
529 529 attr='repo_changelog',
530 530 route_name='repo_commits_file', request_method='GET',
531 531 renderer='rhodecode:templates/commits/changelog.mako')
532 532 # old routes for backward compat
533 533 config.add_view(
534 534 RepoChangelogView,
535 535 attr='repo_changelog',
536 536 route_name='repo_changelog_file', request_method='GET',
537 537 renderer='rhodecode:templates/commits/changelog.mako')
538 538
539 539 # Changelog (old deprecated name for commits page)
540 540 config.add_route(
541 541 name='repo_changelog',
542 542 pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True)
543 543 config.add_route(
544 544 name='repo_changelog_file',
545 545 pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True)
546 546
547 547 # Compare
548 548 config.add_route(
549 549 name='repo_compare_select',
550 550 pattern='/{repo_name:.*?[^/]}/compare', repo_route=True)
551 551 config.add_view(
552 552 RepoCompareView,
553 553 attr='compare_select',
554 554 route_name='repo_compare_select', request_method='GET',
555 555 renderer='rhodecode:templates/compare/compare_diff.mako')
556 556
557 557 config.add_route(
558 558 name='repo_compare',
559 559 pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True)
560 560 config.add_view(
561 561 RepoCompareView,
562 562 attr='compare',
563 563 route_name='repo_compare', request_method='GET',
564 564 renderer=None)
565 565
566 566 # Tags
567 567 config.add_route(
568 568 name='tags_home',
569 569 pattern='/{repo_name:.*?[^/]}/tags', repo_route=True)
570 570 config.add_view(
571 571 RepoTagsView,
572 572 attr='tags',
573 573 route_name='tags_home', request_method='GET',
574 574 renderer='rhodecode:templates/tags/tags.mako')
575 575
576 576 # Branches
577 577 config.add_route(
578 578 name='branches_home',
579 579 pattern='/{repo_name:.*?[^/]}/branches', repo_route=True)
580 580 config.add_view(
581 581 RepoBranchesView,
582 582 attr='branches',
583 583 route_name='branches_home', request_method='GET',
584 584 renderer='rhodecode:templates/branches/branches.mako')
585 585
586 586 # Bookmarks
587 587 config.add_route(
588 588 name='bookmarks_home',
589 589 pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True)
590 590 config.add_view(
591 591 RepoBookmarksView,
592 592 attr='bookmarks',
593 593 route_name='bookmarks_home', request_method='GET',
594 594 renderer='rhodecode:templates/bookmarks/bookmarks.mako')
595 595
596 596 # Forks
597 597 config.add_route(
598 598 name='repo_fork_new',
599 599 pattern='/{repo_name:.*?[^/]}/fork', repo_route=True,
600 600 repo_forbid_when_archived=True,
601 601 repo_accepted_types=['hg', 'git'])
602 602 config.add_view(
603 603 RepoForksView,
604 604 attr='repo_fork_new',
605 605 route_name='repo_fork_new', request_method='GET',
606 606 renderer='rhodecode:templates/forks/forks.mako')
607 607
608 608 config.add_route(
609 609 name='repo_fork_create',
610 610 pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True,
611 611 repo_forbid_when_archived=True,
612 612 repo_accepted_types=['hg', 'git'])
613 613 config.add_view(
614 614 RepoForksView,
615 615 attr='repo_fork_create',
616 616 route_name='repo_fork_create', request_method='POST',
617 617 renderer='rhodecode:templates/forks/fork.mako')
618 618
619 619 config.add_route(
620 620 name='repo_forks_show_all',
621 621 pattern='/{repo_name:.*?[^/]}/forks', repo_route=True,
622 622 repo_accepted_types=['hg', 'git'])
623 623 config.add_view(
624 624 RepoForksView,
625 625 attr='repo_forks_show_all',
626 626 route_name='repo_forks_show_all', request_method='GET',
627 627 renderer='rhodecode:templates/forks/forks.mako')
628 628
629 629 config.add_route(
630 630 name='repo_forks_data',
631 631 pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True,
632 632 repo_accepted_types=['hg', 'git'])
633 633 config.add_view(
634 634 RepoForksView,
635 635 attr='repo_forks_data',
636 636 route_name='repo_forks_data', request_method='GET',
637 637 renderer='json_ext', xhr=True)
638 638
639 639 # Pull Requests
640 640 config.add_route(
641 641 name='pullrequest_show',
642 642 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}',
643 643 repo_route=True)
644 644 config.add_view(
645 645 RepoPullRequestsView,
646 646 attr='pull_request_show',
647 647 route_name='pullrequest_show', request_method='GET',
648 648 renderer='rhodecode:templates/pullrequests/pullrequest_show.mako')
649 649
650 650 config.add_route(
651 651 name='pullrequest_show_all',
652 652 pattern='/{repo_name:.*?[^/]}/pull-request',
653 653 repo_route=True, repo_accepted_types=['hg', 'git'])
654 654 config.add_view(
655 655 RepoPullRequestsView,
656 656 attr='pull_request_list',
657 657 route_name='pullrequest_show_all', request_method='GET',
658 658 renderer='rhodecode:templates/pullrequests/pullrequests.mako')
659 659
660 660 config.add_route(
661 661 name='pullrequest_show_all_data',
662 662 pattern='/{repo_name:.*?[^/]}/pull-request-data',
663 663 repo_route=True, repo_accepted_types=['hg', 'git'])
664 664 config.add_view(
665 665 RepoPullRequestsView,
666 666 attr='pull_request_list_data',
667 667 route_name='pullrequest_show_all_data', request_method='GET',
668 668 renderer='json_ext', xhr=True)
669 669
670 670 config.add_route(
671 671 name='pullrequest_repo_refs',
672 672 pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}',
673 673 repo_route=True)
674 674 config.add_view(
675 675 RepoPullRequestsView,
676 676 attr='pull_request_repo_refs',
677 677 route_name='pullrequest_repo_refs', request_method='GET',
678 678 renderer='json_ext', xhr=True)
679 679
680 680 config.add_route(
681 681 name='pullrequest_repo_targets',
682 682 pattern='/{repo_name:.*?[^/]}/pull-request/repo-targets',
683 683 repo_route=True)
684 684 config.add_view(
685 685 RepoPullRequestsView,
686 686 attr='pullrequest_repo_targets',
687 687 route_name='pullrequest_repo_targets', request_method='GET',
688 688 renderer='json_ext', xhr=True)
689 689
690 690 config.add_route(
691 691 name='pullrequest_new',
692 692 pattern='/{repo_name:.*?[^/]}/pull-request/new',
693 693 repo_route=True, repo_accepted_types=['hg', 'git'],
694 694 repo_forbid_when_archived=True)
695 695 config.add_view(
696 696 RepoPullRequestsView,
697 697 attr='pull_request_new',
698 698 route_name='pullrequest_new', request_method='GET',
699 699 renderer='rhodecode:templates/pullrequests/pullrequest.mako')
700 700
701 701 config.add_route(
702 702 name='pullrequest_create',
703 703 pattern='/{repo_name:.*?[^/]}/pull-request/create',
704 704 repo_route=True, repo_accepted_types=['hg', 'git'],
705 705 repo_forbid_when_archived=True)
706 706 config.add_view(
707 707 RepoPullRequestsView,
708 708 attr='pull_request_create',
709 709 route_name='pullrequest_create', request_method='POST',
710 710 renderer=None)
711 711
712 712 config.add_route(
713 713 name='pullrequest_update',
714 714 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update',
715 715 repo_route=True, repo_forbid_when_archived=True)
716 716 config.add_view(
717 717 RepoPullRequestsView,
718 718 attr='pull_request_update',
719 719 route_name='pullrequest_update', request_method='POST',
720 720 renderer='json_ext')
721 721
722 722 config.add_route(
723 723 name='pullrequest_merge',
724 724 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge',
725 725 repo_route=True, repo_forbid_when_archived=True)
726 726 config.add_view(
727 727 RepoPullRequestsView,
728 728 attr='pull_request_merge',
729 729 route_name='pullrequest_merge', request_method='POST',
730 730 renderer='json_ext')
731 731
732 732 config.add_route(
733 733 name='pullrequest_delete',
734 734 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete',
735 735 repo_route=True, repo_forbid_when_archived=True)
736 736 config.add_view(
737 737 RepoPullRequestsView,
738 738 attr='pull_request_delete',
739 739 route_name='pullrequest_delete', request_method='POST',
740 740 renderer='json_ext')
741 741
742 742 config.add_route(
743 743 name='pullrequest_comment_create',
744 744 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment',
745 745 repo_route=True)
746 746 config.add_view(
747 747 RepoPullRequestsView,
748 748 attr='pull_request_comment_create',
749 749 route_name='pullrequest_comment_create', request_method='POST',
750 750 renderer='json_ext')
751 751
752 752 config.add_route(
753 753 name='pullrequest_comment_edit',
754 754 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/edit',
755 755 repo_route=True, repo_accepted_types=['hg', 'git'])
756 756 config.add_view(
757 757 RepoPullRequestsView,
758 758 attr='pull_request_comment_edit',
759 759 route_name='pullrequest_comment_edit', request_method='POST',
760 760 renderer='json_ext')
761 761
762 762 config.add_route(
763 763 name='pullrequest_comment_delete',
764 764 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete',
765 765 repo_route=True, repo_accepted_types=['hg', 'git'])
766 766 config.add_view(
767 767 RepoPullRequestsView,
768 768 attr='pull_request_comment_delete',
769 769 route_name='pullrequest_comment_delete', request_method='POST',
770 770 renderer='json_ext')
771 771
772 772 config.add_route(
773 773 name='pullrequest_comments',
774 774 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comments',
775 775 repo_route=True)
776 776 config.add_view(
777 777 RepoPullRequestsView,
778 778 attr='pullrequest_comments',
779 779 route_name='pullrequest_comments', request_method='POST',
780 780 renderer='string_html', xhr=True)
781 781
782 782 config.add_route(
783 783 name='pullrequest_todos',
784 784 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/todos',
785 785 repo_route=True)
786 786 config.add_view(
787 787 RepoPullRequestsView,
788 788 attr='pullrequest_todos',
789 789 route_name='pullrequest_todos', request_method='POST',
790 790 renderer='string_html', xhr=True)
791 791
792 792 config.add_route(
793 793 name='pullrequest_drafts',
794 794 pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/drafts',
795 795 repo_route=True)
796 796 config.add_view(
797 797 RepoPullRequestsView,
798 798 attr='pullrequest_drafts',
799 799 route_name='pullrequest_drafts', request_method='POST',
800 800 renderer='string_html', xhr=True)
801 801
802 802 # Artifacts, (EE feature)
803 803 config.add_route(
804 804 name='repo_artifacts_list',
805 805 pattern='/{repo_name:.*?[^/]}/artifacts', repo_route=True)
806 806 config.add_view(
807 807 RepoArtifactsView,
808 808 attr='repo_artifacts',
809 809 route_name='repo_artifacts_list', request_method='GET',
810 810 renderer='rhodecode:templates/artifacts/artifact_list.mako')
811 811
812 812 # Settings
813 813 config.add_route(
814 814 name='edit_repo',
815 815 pattern='/{repo_name:.*?[^/]}/settings', repo_route=True)
816 816 config.add_view(
817 817 RepoSettingsView,
818 818 attr='edit_settings',
819 819 route_name='edit_repo', request_method='GET',
820 820 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
821 821 # update is POST on edit_repo
822 822 config.add_view(
823 823 RepoSettingsView,
824 824 attr='edit_settings_update',
825 825 route_name='edit_repo', request_method='POST',
826 826 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
827 827
828 828 # Settings advanced
829 829 config.add_route(
830 830 name='edit_repo_advanced',
831 831 pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True)
832 832 config.add_view(
833 833 RepoSettingsAdvancedView,
834 834 attr='edit_advanced',
835 835 route_name='edit_repo_advanced', request_method='GET',
836 836 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
837 837
838 838 config.add_route(
839 839 name='edit_repo_advanced_archive',
840 840 pattern='/{repo_name:.*?[^/]}/settings/advanced/archive', repo_route=True)
841 841 config.add_view(
842 842 RepoSettingsAdvancedView,
843 843 attr='edit_advanced_archive',
844 844 route_name='edit_repo_advanced_archive', request_method='POST',
845 845 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
846 846
847 847 config.add_route(
848 848 name='edit_repo_advanced_delete',
849 849 pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True)
850 850 config.add_view(
851 851 RepoSettingsAdvancedView,
852 852 attr='edit_advanced_delete',
853 853 route_name='edit_repo_advanced_delete', request_method='POST',
854 854 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
855 855
856 856 config.add_route(
857 857 name='edit_repo_advanced_locking',
858 858 pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True)
859 859 config.add_view(
860 860 RepoSettingsAdvancedView,
861 861 attr='edit_advanced_toggle_locking',
862 862 route_name='edit_repo_advanced_locking', request_method='POST',
863 863 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
864 864
865 865 config.add_route(
866 866 name='edit_repo_advanced_journal',
867 867 pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True)
868 868 config.add_view(
869 869 RepoSettingsAdvancedView,
870 870 attr='edit_advanced_journal',
871 871 route_name='edit_repo_advanced_journal', request_method='POST',
872 872 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
873 873
874 874 config.add_route(
875 875 name='edit_repo_advanced_fork',
876 876 pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True)
877 877 config.add_view(
878 878 RepoSettingsAdvancedView,
879 879 attr='edit_advanced_fork',
880 880 route_name='edit_repo_advanced_fork', request_method='POST',
881 881 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
882 882
883 883 config.add_route(
884 884 name='edit_repo_advanced_hooks',
885 885 pattern='/{repo_name:.*?[^/]}/settings/advanced/hooks', repo_route=True)
886 886 config.add_view(
887 887 RepoSettingsAdvancedView,
888 888 attr='edit_advanced_install_hooks',
889 889 route_name='edit_repo_advanced_hooks', request_method='GET',
890 890 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
891 891
892 892 # Caches
893 893 config.add_route(
894 894 name='edit_repo_caches',
895 895 pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True)
896 896 config.add_view(
897 897 RepoCachesView,
898 898 attr='repo_caches',
899 899 route_name='edit_repo_caches', request_method='GET',
900 900 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
901 901 config.add_view(
902 902 RepoCachesView,
903 903 attr='repo_caches_purge',
904 904 route_name='edit_repo_caches', request_method='POST')
905 905
906 906 # Permissions
907 907 config.add_route(
908 908 name='edit_repo_perms',
909 909 pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True)
910 910 config.add_view(
911 911 RepoSettingsPermissionsView,
912 912 attr='edit_permissions',
913 913 route_name='edit_repo_perms', request_method='GET',
914 914 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
915 915 config.add_view(
916 916 RepoSettingsPermissionsView,
917 917 attr='edit_permissions_update',
918 918 route_name='edit_repo_perms', request_method='POST',
919 919 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
920 920
921 921 config.add_route(
922 922 name='edit_repo_perms_set_private',
923 923 pattern='/{repo_name:.*?[^/]}/settings/permissions/set_private', repo_route=True)
924 924 config.add_view(
925 925 RepoSettingsPermissionsView,
926 926 attr='edit_permissions_set_private_repo',
927 927 route_name='edit_repo_perms_set_private', request_method='POST',
928 928 renderer='json_ext')
929 929
930 930 # Permissions Branch (EE feature)
931 931 config.add_route(
932 932 name='edit_repo_perms_branch',
933 933 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions', repo_route=True)
934 934 config.add_view(
935 RepoBranchesView,
936 attr='branches',
935 RepoSettingsBranchPermissionsView,
936 attr='branch_permissions',
937 937 route_name='edit_repo_perms_branch', request_method='GET',
938 938 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
939 939
940 940 config.add_route(
941 941 name='edit_repo_perms_branch_delete',
942 942 pattern='/{repo_name:.*?[^/]}/settings/branch_permissions/{rule_id}/delete',
943 943 repo_route=True)
944 944 ## Only implemented in EE
945 945
946 946 # Maintenance
947 947 config.add_route(
948 948 name='edit_repo_maintenance',
949 949 pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True)
950 950 config.add_view(
951 951 RepoMaintenanceView,
952 952 attr='repo_maintenance',
953 route_name='edit_repo_maintenance_execute', request_method='GET',
954 renderer='json', xhr=True)
953 route_name='edit_repo_maintenance', request_method='GET',
954 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
955 955
956 956 config.add_route(
957 957 name='edit_repo_maintenance_execute',
958 958 pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True)
959 959 config.add_view(
960 960 RepoMaintenanceView,
961 961 attr='repo_maintenance_execute',
962 route_name='edit_repo_maintenance', request_method='GET',
963 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
962 route_name='edit_repo_maintenance_execute', request_method='GET',
963 renderer='json', xhr=True)
964 964
965 965 # Fields
966 966 config.add_route(
967 967 name='edit_repo_fields',
968 968 pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True)
969 969 config.add_view(
970 970 RepoSettingsFieldsView,
971 971 attr='repo_field_edit',
972 972 route_name='edit_repo_fields', request_method='GET',
973 973 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
974 974
975 975 config.add_route(
976 976 name='edit_repo_fields_create',
977 977 pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True)
978 978 config.add_view(
979 979 RepoSettingsFieldsView,
980 980 attr='repo_field_create',
981 981 route_name='edit_repo_fields_create', request_method='POST',
982 982 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
983 983
984 984 config.add_route(
985 985 name='edit_repo_fields_delete',
986 986 pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True)
987 987 config.add_view(
988 988 RepoSettingsFieldsView,
989 989 attr='repo_field_delete',
990 990 route_name='edit_repo_fields_delete', request_method='POST',
991 991 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
992 992
993 993 # Locking
994 994 config.add_route(
995 995 name='repo_edit_toggle_locking',
996 996 pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True)
997 997 config.add_view(
998 998 RepoSettingsView,
999 999 attr='edit_advanced_toggle_locking',
1000 1000 route_name='repo_edit_toggle_locking', request_method='GET',
1001 1001 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1002 1002
1003 1003 # Remote
1004 1004 config.add_route(
1005 1005 name='edit_repo_remote',
1006 1006 pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True)
1007 1007 config.add_view(
1008 1008 RepoSettingsRemoteView,
1009 1009 attr='repo_remote_edit_form',
1010 1010 route_name='edit_repo_remote', request_method='GET',
1011 1011 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1012 1012
1013 1013 config.add_route(
1014 1014 name='edit_repo_remote_pull',
1015 1015 pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True)
1016 1016 config.add_view(
1017 1017 RepoSettingsRemoteView,
1018 1018 attr='repo_remote_pull_changes',
1019 1019 route_name='edit_repo_remote_pull', request_method='POST',
1020 1020 renderer=None)
1021 1021
1022 1022 config.add_route(
1023 1023 name='edit_repo_remote_push',
1024 1024 pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True)
1025 1025
1026 1026 # Statistics
1027 1027 config.add_route(
1028 1028 name='edit_repo_statistics',
1029 1029 pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True)
1030 1030 config.add_view(
1031 1031 RepoSettingsView,
1032 1032 attr='edit_statistics_form',
1033 1033 route_name='edit_repo_statistics', request_method='GET',
1034 1034 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1035 1035
1036 1036 config.add_route(
1037 1037 name='edit_repo_statistics_reset',
1038 1038 pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True)
1039 1039 config.add_view(
1040 1040 RepoSettingsView,
1041 1041 attr='repo_statistics_reset',
1042 1042 route_name='edit_repo_statistics_reset', request_method='POST',
1043 1043 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1044 1044
1045 1045 # Issue trackers
1046 1046 config.add_route(
1047 1047 name='edit_repo_issuetracker',
1048 1048 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True)
1049 1049 config.add_view(
1050 1050 RepoSettingsIssueTrackersView,
1051 1051 attr='repo_issuetracker',
1052 1052 route_name='edit_repo_issuetracker', request_method='GET',
1053 1053 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1054 1054
1055 1055 config.add_route(
1056 1056 name='edit_repo_issuetracker_test',
1057 1057 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True)
1058 1058 config.add_view(
1059 1059 RepoSettingsIssueTrackersView,
1060 1060 attr='repo_issuetracker_test',
1061 1061 route_name='edit_repo_issuetracker_test', request_method='POST',
1062 1062 renderer='string', xhr=True)
1063 1063
1064 1064 config.add_route(
1065 1065 name='edit_repo_issuetracker_delete',
1066 1066 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True)
1067 1067 config.add_view(
1068 1068 RepoSettingsIssueTrackersView,
1069 1069 attr='repo_issuetracker_delete',
1070 1070 route_name='edit_repo_issuetracker_delete', request_method='POST',
1071 1071 renderer='json_ext', xhr=True)
1072 1072
1073 1073 config.add_route(
1074 1074 name='edit_repo_issuetracker_update',
1075 1075 pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True)
1076 1076 config.add_view(
1077 1077 RepoSettingsIssueTrackersView,
1078 1078 attr='repo_issuetracker_update',
1079 1079 route_name='edit_repo_issuetracker_update', request_method='POST',
1080 1080 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1081 1081
1082 1082 # VCS Settings
1083 1083 config.add_route(
1084 1084 name='edit_repo_vcs',
1085 1085 pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True)
1086 1086 config.add_view(
1087 1087 RepoSettingsVcsView,
1088 1088 attr='repo_vcs_settings',
1089 1089 route_name='edit_repo_vcs', request_method='GET',
1090 1090 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1091 1091
1092 1092 config.add_route(
1093 1093 name='edit_repo_vcs_update',
1094 1094 pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True)
1095 1095 config.add_view(
1096 1096 RepoSettingsVcsView,
1097 1097 attr='repo_settings_vcs_update',
1098 1098 route_name='edit_repo_vcs_update', request_method='POST',
1099 1099 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1100 1100
1101 1101 # svn pattern
1102 1102 config.add_route(
1103 1103 name='edit_repo_vcs_svn_pattern_delete',
1104 1104 pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True)
1105 1105 config.add_view(
1106 1106 RepoSettingsVcsView,
1107 1107 attr='repo_settings_delete_svn_pattern',
1108 1108 route_name='edit_repo_vcs_svn_pattern_delete', request_method='POST',
1109 1109 renderer='json_ext', xhr=True)
1110 1110
1111 1111 # Repo Review Rules (EE feature)
1112 1112 config.add_route(
1113 1113 name='repo_reviewers',
1114 1114 pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True)
1115 1115 config.add_view(
1116 1116 RepoReviewRulesView,
1117 1117 attr='repo_review_rules',
1118 1118 route_name='repo_reviewers', request_method='GET',
1119 1119 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1120 1120
1121 1121 config.add_route(
1122 1122 name='repo_default_reviewers_data',
1123 1123 pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True)
1124 1124 config.add_view(
1125 1125 RepoReviewRulesView,
1126 1126 attr='repo_default_reviewers_data',
1127 1127 route_name='repo_default_reviewers_data', request_method='GET',
1128 1128 renderer='json_ext')
1129 1129
1130 1130 # Repo Automation (EE feature)
1131 1131 config.add_route(
1132 1132 name='repo_automation',
1133 1133 pattern='/{repo_name:.*?[^/]}/settings/automation', repo_route=True)
1134 1134 config.add_view(
1135 1135 RepoAutomationView,
1136 1136 attr='repo_automation',
1137 1137 route_name='repo_automation', request_method='GET',
1138 1138 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1139 1139
1140 1140 # Strip
1141 1141 config.add_route(
1142 1142 name='edit_repo_strip',
1143 1143 pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True)
1144 1144 config.add_view(
1145 1145 RepoStripView,
1146 1146 attr='strip',
1147 1147 route_name='edit_repo_strip', request_method='GET',
1148 1148 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1149 1149
1150 1150 config.add_route(
1151 1151 name='strip_check',
1152 1152 pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True)
1153 1153 config.add_view(
1154 1154 RepoStripView,
1155 1155 attr='strip_check',
1156 1156 route_name='strip_check', request_method='POST',
1157 1157 renderer='json', xhr=True)
1158 1158
1159 1159 config.add_route(
1160 1160 name='strip_execute',
1161 1161 pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True)
1162 1162 config.add_view(
1163 1163 RepoStripView,
1164 1164 attr='strip_execute',
1165 1165 route_name='strip_execute', request_method='POST',
1166 1166 renderer='json', xhr=True)
1167 1167
1168 1168 # Audit logs
1169 1169 config.add_route(
1170 1170 name='edit_repo_audit_logs',
1171 1171 pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True)
1172 1172 config.add_view(
1173 1173 AuditLogsView,
1174 1174 attr='repo_audit_logs',
1175 1175 route_name='edit_repo_audit_logs', request_method='GET',
1176 1176 renderer='rhodecode:templates/admin/repos/repo_edit.mako')
1177 1177
1178 1178 # ATOM/RSS Feed, shouldn't contain slashes for outlook compatibility
1179 1179 config.add_route(
1180 1180 name='rss_feed_home',
1181 1181 pattern='/{repo_name:.*?[^/]}/feed-rss', repo_route=True)
1182 1182 config.add_view(
1183 1183 RepoFeedView,
1184 1184 attr='rss',
1185 1185 route_name='rss_feed_home', request_method='GET', renderer=None)
1186 1186
1187 1187 config.add_route(
1188 1188 name='rss_feed_home_old',
1189 1189 pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True)
1190 1190 config.add_view(
1191 1191 RepoFeedView,
1192 1192 attr='rss',
1193 1193 route_name='rss_feed_home_old', request_method='GET', renderer=None)
1194 1194
1195 1195 config.add_route(
1196 1196 name='atom_feed_home',
1197 1197 pattern='/{repo_name:.*?[^/]}/feed-atom', repo_route=True)
1198 1198 config.add_view(
1199 1199 RepoFeedView,
1200 1200 attr='atom',
1201 1201 route_name='atom_feed_home', request_method='GET', renderer=None)
1202 1202
1203 1203 config.add_route(
1204 1204 name='atom_feed_home_old',
1205 1205 pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True)
1206 1206 config.add_view(
1207 1207 RepoFeedView,
1208 1208 attr='atom',
1209 1209 route_name='atom_feed_home_old', request_method='GET', renderer=None)
1210 1210
1211 1211 # NOTE(marcink): needs to be at the end for catch-all
1212 1212 add_route_with_slash(
1213 1213 config,
1214 1214 name='repo_summary',
1215 1215 pattern='/{repo_name:.*?[^/]}', repo_route=True)
1216 1216 config.add_view(
1217 1217 RepoSummaryView,
1218 1218 attr='summary',
1219 1219 route_name='repo_summary', request_method='GET',
1220 1220 renderer='rhodecode:templates/summary/summary.mako')
1221 1221
1222 1222 # TODO(marcink): there's no such route??
1223 1223 config.add_view(
1224 1224 RepoSummaryView,
1225 1225 attr='summary',
1226 1226 route_name='repo_summary_slash', request_method='GET',
1227 1227 renderer='rhodecode:templates/summary/summary.mako') No newline at end of file
@@ -1,1070 +1,1092 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.apps.repository.tests.test_repo_compare import ComparePage
27 27 from rhodecode.apps.repository.views.repo_files import RepoFilesView
28 28 from rhodecode.lib import helpers as h
29 29 from rhodecode.lib.compat import OrderedDict
30 30 from rhodecode.lib.ext_json import json
31 31 from rhodecode.lib.vcs import nodes
32 32
33 33 from rhodecode.lib.vcs.conf import settings
34 34 from rhodecode.tests import assert_session_flash
35 35 from rhodecode.tests.fixture import Fixture
36 36 from rhodecode.model.db import Session
37 37
38 38 fixture = Fixture()
39 39
40 40
41 41 def get_node_history(backend_type):
42 42 return {
43 43 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
44 44 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
45 45 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
46 46 }[backend_type]
47 47
48 48
49 49 def route_path(name, params=None, **kwargs):
50 50 import urllib
51 51
52 52 base_url = {
53 53 'repo_summary': '/{repo_name}',
54 54 'repo_archivefile': '/{repo_name}/archive/{fname}',
55 55 'repo_files_diff': '/{repo_name}/diff/{f_path}',
56 56 'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
57 57 'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
58 58 'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
59 59 'repo_files:default_commit': '/{repo_name}/files',
60 60 'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
61 61 'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
62 62 'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
63 63 'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
64 64 'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
65 65 'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
66 66 'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
67 67 'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
68 68 'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
69 69 'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
70 70 'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
71 71 'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
72 72 'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
73 73 'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
74 74 'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
75 75 'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
76 76 }[name].format(**kwargs)
77 77
78 78 if params:
79 79 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
80 80 return base_url
81 81
82 82
83 83 def assert_files_in_response(response, files, params):
84 84 template = (
85 85 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
86 86 _assert_items_in_response(response, files, template, params)
87 87
88 88
89 89 def assert_dirs_in_response(response, dirs, params):
90 90 template = (
91 91 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
92 92 _assert_items_in_response(response, dirs, template, params)
93 93
94 94
95 95 def _assert_items_in_response(response, items, template, params):
96 96 for item in items:
97 97 item_params = {'name': item}
98 98 item_params.update(params)
99 99 response.mustcontain(template % item_params)
100 100
101 101
102 102 def assert_timeago_in_response(response, items, params):
103 103 for item in items:
104 104 response.mustcontain(h.age_component(params['date']))
105 105
106 106
107 107 @pytest.mark.usefixtures("app")
108 108 class TestFilesViews(object):
109 109
110 110 def test_show_files(self, backend):
111 111 response = self.app.get(
112 112 route_path('repo_files',
113 113 repo_name=backend.repo_name,
114 114 commit_id='tip', f_path='/'))
115 115 commit = backend.repo.get_commit()
116 116
117 117 params = {
118 118 'repo_name': backend.repo_name,
119 119 'commit_id': commit.raw_id,
120 120 'date': commit.date
121 121 }
122 122 assert_dirs_in_response(response, ['docs', 'vcs'], params)
123 123 files = [
124 124 '.gitignore',
125 125 '.hgignore',
126 126 '.hgtags',
127 127 # TODO: missing in Git
128 128 # '.travis.yml',
129 129 'MANIFEST.in',
130 130 'README.rst',
131 131 # TODO: File is missing in svn repository
132 132 # 'run_test_and_report.sh',
133 133 'setup.cfg',
134 134 'setup.py',
135 135 'test_and_report.sh',
136 136 'tox.ini',
137 137 ]
138 138 assert_files_in_response(response, files, params)
139 139 assert_timeago_in_response(response, files, params)
140 140
141 141 def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
142 142 repo = backend_hg['subrepos']
143 143 response = self.app.get(
144 144 route_path('repo_files',
145 145 repo_name=repo.repo_name,
146 146 commit_id='tip', f_path='/'))
147 147 assert_response = response.assert_response()
148 148 assert_response.contains_one_link(
149 149 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
150 150
151 151 def test_show_files_links_submodules_with_absolute_url_subpaths(
152 152 self, backend_hg):
153 153 repo = backend_hg['subrepos']
154 154 response = self.app.get(
155 155 route_path('repo_files',
156 156 repo_name=repo.repo_name,
157 157 commit_id='tip', f_path='/'))
158 158 assert_response = response.assert_response()
159 159 assert_response.contains_one_link(
160 160 'subpaths-path @ 000000000000',
161 161 'http://sub-base.example.com/subpaths-path')
162 162
163 163 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
164 164 def test_files_menu(self, backend):
165 165 new_branch = "temp_branch_name"
166 166 commits = [
167 167 {'message': 'a'},
168 168 {'message': 'b', 'branch': new_branch}
169 169 ]
170 170 backend.create_repo(commits)
171 171 backend.repo.landing_rev = "branch:%s" % new_branch
172 172 Session().commit()
173 173
174 174 # get response based on tip and not new commit
175 175 response = self.app.get(
176 176 route_path('repo_files',
177 177 repo_name=backend.repo_name,
178 178 commit_id='tip', f_path='/'))
179 179
180 180 # make sure Files menu url is not tip but new commit
181 181 landing_rev = backend.repo.landing_ref_name
182 182 files_url = route_path('repo_files:default_path',
183 183 repo_name=backend.repo_name,
184 184 commit_id=landing_rev, params={'at': landing_rev})
185 185
186 186 assert landing_rev != 'tip'
187 187 response.mustcontain(
188 188 '<li class="active"><a class="menulink" href="%s">' % files_url)
189 189
190 190 def test_show_files_commit(self, backend):
191 191 commit = backend.repo.get_commit(commit_idx=32)
192 192
193 193 response = self.app.get(
194 194 route_path('repo_files',
195 195 repo_name=backend.repo_name,
196 196 commit_id=commit.raw_id, f_path='/'))
197 197
198 198 dirs = ['docs', 'tests']
199 199 files = ['README.rst']
200 200 params = {
201 201 'repo_name': backend.repo_name,
202 202 'commit_id': commit.raw_id,
203 203 }
204 204 assert_dirs_in_response(response, dirs, params)
205 205 assert_files_in_response(response, files, params)
206 206
207 207 def test_show_files_different_branch(self, backend):
208 208 branches = dict(
209 209 hg=(150, ['git']),
210 210 # TODO: Git test repository does not contain other branches
211 211 git=(633, ['master']),
212 212 # TODO: Branch support in Subversion
213 213 svn=(150, [])
214 214 )
215 215 idx, branches = branches[backend.alias]
216 216 commit = backend.repo.get_commit(commit_idx=idx)
217 217 response = self.app.get(
218 218 route_path('repo_files',
219 219 repo_name=backend.repo_name,
220 220 commit_id=commit.raw_id, f_path='/'))
221 221
222 222 assert_response = response.assert_response()
223 223 for branch in branches:
224 224 assert_response.element_contains('.tags .branchtag', branch)
225 225
226 226 def test_show_files_paging(self, backend):
227 227 repo = backend.repo
228 228 indexes = [73, 92, 109, 1, 0]
229 229 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
230 230 for rev in indexes]
231 231
232 232 for idx in idx_map:
233 233 response = self.app.get(
234 234 route_path('repo_files',
235 235 repo_name=backend.repo_name,
236 236 commit_id=idx[1], f_path='/'))
237 237
238 238 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
239 239
240 240 def test_file_source(self, backend):
241 241 commit = backend.repo.get_commit(commit_idx=167)
242 242 response = self.app.get(
243 243 route_path('repo_files',
244 244 repo_name=backend.repo_name,
245 245 commit_id=commit.raw_id, f_path='vcs/nodes.py'))
246 246
247 247 msgbox = """<div class="commit">%s</div>"""
248 248 response.mustcontain(msgbox % (commit.message, ))
249 249
250 250 assert_response = response.assert_response()
251 251 if commit.branch:
252 252 assert_response.element_contains(
253 253 '.tags.tags-main .branchtag', commit.branch)
254 254 if commit.tags:
255 255 for tag in commit.tags:
256 256 assert_response.element_contains('.tags.tags-main .tagtag', tag)
257 257
258 258 def test_file_source_annotated(self, backend):
259 259 response = self.app.get(
260 260 route_path('repo_files:annotated',
261 261 repo_name=backend.repo_name,
262 262 commit_id='tip', f_path='vcs/nodes.py'))
263 263 expected_commits = {
264 264 'hg': 'r356',
265 265 'git': 'r345',
266 266 'svn': 'r208',
267 267 }
268 268 response.mustcontain(expected_commits[backend.alias])
269 269
270 270 def test_file_source_authors(self, backend):
271 271 response = self.app.get(
272 272 route_path('repo_file_authors',
273 273 repo_name=backend.repo_name,
274 274 commit_id='tip', f_path='vcs/nodes.py'))
275 275 expected_authors = {
276 276 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
277 277 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
278 278 'svn': ('marcin', 'lukasz'),
279 279 }
280 280
281 281 for author in expected_authors[backend.alias]:
282 282 response.mustcontain(author)
283 283
284 284 def test_file_source_authors_with_annotation(self, backend):
285 285 response = self.app.get(
286 286 route_path('repo_file_authors',
287 287 repo_name=backend.repo_name,
288 288 commit_id='tip', f_path='vcs/nodes.py',
289 289 params=dict(annotate=1)))
290 290 expected_authors = {
291 291 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
292 292 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
293 293 'svn': ('marcin', 'lukasz'),
294 294 }
295 295
296 296 for author in expected_authors[backend.alias]:
297 297 response.mustcontain(author)
298 298
299 299 def test_file_source_history(self, backend, xhr_header):
300 300 response = self.app.get(
301 301 route_path('repo_file_history',
302 302 repo_name=backend.repo_name,
303 303 commit_id='tip', f_path='vcs/nodes.py'),
304 304 extra_environ=xhr_header)
305 305 assert get_node_history(backend.alias) == json.loads(response.body)
306 306
307 307 def test_file_source_history_svn(self, backend_svn, xhr_header):
308 308 simple_repo = backend_svn['svn-simple-layout']
309 309 response = self.app.get(
310 310 route_path('repo_file_history',
311 311 repo_name=simple_repo.repo_name,
312 312 commit_id='tip', f_path='trunk/example.py'),
313 313 extra_environ=xhr_header)
314 314
315 315 expected_data = json.loads(
316 316 fixture.load_resource('svn_node_history_branches.json'))
317 317
318 318 assert expected_data == response.json
319 319
320 320 def test_file_source_history_with_annotation(self, backend, xhr_header):
321 321 response = self.app.get(
322 322 route_path('repo_file_history',
323 323 repo_name=backend.repo_name,
324 324 commit_id='tip', f_path='vcs/nodes.py',
325 325 params=dict(annotate=1)),
326 326
327 327 extra_environ=xhr_header)
328 328 assert get_node_history(backend.alias) == json.loads(response.body)
329 329
330 330 def test_tree_search_top_level(self, backend, xhr_header):
331 331 commit = backend.repo.get_commit(commit_idx=173)
332 332 response = self.app.get(
333 333 route_path('repo_files_nodelist',
334 334 repo_name=backend.repo_name,
335 335 commit_id=commit.raw_id, f_path='/'),
336 336 extra_environ=xhr_header)
337 337 assert 'nodes' in response.json
338 338 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
339 339
340 340 def test_tree_search_missing_xhr(self, backend):
341 341 self.app.get(
342 342 route_path('repo_files_nodelist',
343 343 repo_name=backend.repo_name,
344 344 commit_id='tip', f_path='/'),
345 345 status=404)
346 346
347 347 def test_tree_search_at_path(self, backend, xhr_header):
348 348 commit = backend.repo.get_commit(commit_idx=173)
349 349 response = self.app.get(
350 350 route_path('repo_files_nodelist',
351 351 repo_name=backend.repo_name,
352 352 commit_id=commit.raw_id, f_path='/docs'),
353 353 extra_environ=xhr_header)
354 354 assert 'nodes' in response.json
355 355 nodes = response.json['nodes']
356 356 assert {'name': 'docs/api', 'type': 'dir'} in nodes
357 357 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
358 358
359 359 def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
360 360 commit = backend.repo.get_commit(commit_idx=173)
361 361 response = self.app.get(
362 362 route_path('repo_files_nodelist',
363 363 repo_name=backend.repo_name,
364 364 commit_id=commit.raw_id, f_path='/docs/api'),
365 365 extra_environ=xhr_header)
366 366 assert 'nodes' in response.json
367 367 nodes = response.json['nodes']
368 368 assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes
369 369
370 370 def test_tree_search_at_path_missing_xhr(self, backend):
371 371 self.app.get(
372 372 route_path('repo_files_nodelist',
373 373 repo_name=backend.repo_name,
374 374 commit_id='tip', f_path='/docs'),
375 375 status=404)
376 376
377 377 def test_nodetree(self, backend, xhr_header):
378 378 commit = backend.repo.get_commit(commit_idx=173)
379 379 response = self.app.get(
380 380 route_path('repo_nodetree_full',
381 381 repo_name=backend.repo_name,
382 382 commit_id=commit.raw_id, f_path='/'),
383 383 extra_environ=xhr_header)
384 384
385 385 assert_response = response.assert_response()
386 386
387 387 for attr in ['data-commit-id', 'data-date', 'data-author']:
388 388 elements = assert_response.get_elements('[{}]'.format(attr))
389 389 assert len(elements) > 1
390 390
391 391 for element in elements:
392 392 assert element.get(attr)
393 393
394 394 def test_nodetree_if_file(self, backend, xhr_header):
395 395 commit = backend.repo.get_commit(commit_idx=173)
396 396 response = self.app.get(
397 397 route_path('repo_nodetree_full',
398 398 repo_name=backend.repo_name,
399 399 commit_id=commit.raw_id, f_path='README.rst'),
400 400 extra_environ=xhr_header)
401 401 assert response.body == ''
402 402
403 403 def test_nodetree_wrong_path(self, backend, xhr_header):
404 404 commit = backend.repo.get_commit(commit_idx=173)
405 405 response = self.app.get(
406 406 route_path('repo_nodetree_full',
407 407 repo_name=backend.repo_name,
408 408 commit_id=commit.raw_id, f_path='/dont-exist'),
409 409 extra_environ=xhr_header)
410 410
411 411 err = 'error: There is no file nor ' \
412 412 'directory at the given path'
413 413 assert err in response.body
414 414
415 415 def test_nodetree_missing_xhr(self, backend):
416 416 self.app.get(
417 417 route_path('repo_nodetree_full',
418 418 repo_name=backend.repo_name,
419 419 commit_id='tip', f_path='/'),
420 420 status=404)
421 421
422 422
423 423 @pytest.mark.usefixtures("app", "autologin_user")
424 424 class TestRawFileHandling(object):
425 425
426 426 def test_download_file(self, backend):
427 427 commit = backend.repo.get_commit(commit_idx=173)
428 428 response = self.app.get(
429 429 route_path('repo_file_download',
430 430 repo_name=backend.repo_name,
431 431 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
432 432
433 433 assert response.content_disposition == 'attachment; filename="nodes.py"; filename*=UTF-8\'\'nodes.py'
434 434 assert response.content_type == "text/x-python"
435 435
436 436 def test_download_file_wrong_cs(self, backend):
437 437 raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
438 438
439 439 response = self.app.get(
440 440 route_path('repo_file_download',
441 441 repo_name=backend.repo_name,
442 442 commit_id=raw_id, f_path='vcs/nodes.svg'),
443 443 status=404)
444 444
445 445 msg = """No such commit exists for this repository"""
446 446 response.mustcontain(msg)
447 447
448 448 def test_download_file_wrong_f_path(self, backend):
449 449 commit = backend.repo.get_commit(commit_idx=173)
450 450 f_path = 'vcs/ERRORnodes.py'
451 451
452 452 response = self.app.get(
453 453 route_path('repo_file_download',
454 454 repo_name=backend.repo_name,
455 455 commit_id=commit.raw_id, f_path=f_path),
456 456 status=404)
457 457
458 458 msg = (
459 459 "There is no file nor directory at the given path: "
460 460 "`%s` at commit %s" % (f_path, commit.short_id))
461 461 response.mustcontain(msg)
462 462
463 463 def test_file_raw(self, backend):
464 464 commit = backend.repo.get_commit(commit_idx=173)
465 465 response = self.app.get(
466 466 route_path('repo_file_raw',
467 467 repo_name=backend.repo_name,
468 468 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
469 469
470 470 assert response.content_type == "text/plain"
471 471
472 472 def test_file_raw_binary(self, backend):
473 473 commit = backend.repo.get_commit()
474 474 response = self.app.get(
475 475 route_path('repo_file_raw',
476 476 repo_name=backend.repo_name,
477 477 commit_id=commit.raw_id,
478 478 f_path='docs/theme/ADC/static/breadcrumb_background.png'),)
479 479
480 480 assert response.content_disposition == 'inline'
481 481
482 482 def test_raw_file_wrong_cs(self, backend):
483 483 raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
484 484
485 485 response = self.app.get(
486 486 route_path('repo_file_raw',
487 487 repo_name=backend.repo_name,
488 488 commit_id=raw_id, f_path='vcs/nodes.svg'),
489 489 status=404)
490 490
491 491 msg = """No such commit exists for this repository"""
492 492 response.mustcontain(msg)
493 493
494 494 def test_raw_wrong_f_path(self, backend):
495 495 commit = backend.repo.get_commit(commit_idx=173)
496 496 f_path = 'vcs/ERRORnodes.py'
497 497 response = self.app.get(
498 498 route_path('repo_file_raw',
499 499 repo_name=backend.repo_name,
500 500 commit_id=commit.raw_id, f_path=f_path),
501 501 status=404)
502 502
503 503 msg = (
504 504 "There is no file nor directory at the given path: "
505 505 "`%s` at commit %s" % (f_path, commit.short_id))
506 506 response.mustcontain(msg)
507 507
508 508 def test_raw_svg_should_not_be_rendered(self, backend):
509 509 backend.create_repo()
510 510 backend.ensure_file("xss.svg")
511 511 response = self.app.get(
512 512 route_path('repo_file_raw',
513 513 repo_name=backend.repo_name,
514 514 commit_id='tip', f_path='xss.svg'),)
515 515 # If the content type is image/svg+xml then it allows to render HTML
516 516 # and malicious SVG.
517 517 assert response.content_type == "text/plain"
518 518
519 519
520 520 @pytest.mark.usefixtures("app")
521 521 class TestRepositoryArchival(object):
522 522
523 523 def test_archival(self, backend):
524 524 backend.enable_downloads()
525 525 commit = backend.repo.get_commit(commit_idx=173)
526 526 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
527 527
528 528 short = commit.short_id + extension
529 529 fname = commit.raw_id + extension
530 530 filename = '%s-%s' % (backend.repo_name, short)
531 531 response = self.app.get(
532 532 route_path('repo_archivefile',
533 533 repo_name=backend.repo_name,
534 534 fname=fname))
535 535
536 536 assert response.status == '200 OK'
537 537 headers = [
538 538 ('Content-Disposition', 'attachment; filename=%s' % filename),
539 539 ('Content-Type', '%s' % content_type),
540 540 ]
541 541
542 542 for header in headers:
543 543 assert header in response.headers.items()
544 544
545 def test_archival_no_hash(self, backend):
546 backend.enable_downloads()
547 commit = backend.repo.get_commit(commit_idx=173)
548 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
549
550 short = 'plain' + extension
551 fname = commit.raw_id + extension
552 filename = '%s-%s' % (backend.repo_name, short)
553 response = self.app.get(
554 route_path('repo_archivefile',
555 repo_name=backend.repo_name,
556 fname=fname, params={'with_hash': 0}))
557
558 assert response.status == '200 OK'
559 headers = [
560 ('Content-Disposition', 'attachment; filename=%s' % filename),
561 ('Content-Type', '%s' % content_type),
562 ]
563
564 for header in headers:
565 assert header in response.headers.items()
566
545 567 @pytest.mark.parametrize('arch_ext',[
546 568 'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
547 569 def test_archival_wrong_ext(self, backend, arch_ext):
548 570 backend.enable_downloads()
549 571 commit = backend.repo.get_commit(commit_idx=173)
550 572
551 573 fname = commit.raw_id + '.' + arch_ext
552 574
553 575 response = self.app.get(
554 576 route_path('repo_archivefile',
555 577 repo_name=backend.repo_name,
556 578 fname=fname))
557 579 response.mustcontain(
558 580 'Unknown archive type for: `{}`'.format(fname))
559 581
560 582 @pytest.mark.parametrize('commit_id', [
561 583 '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
562 584 def test_archival_wrong_commit_id(self, backend, commit_id):
563 585 backend.enable_downloads()
564 586 fname = '%s.zip' % commit_id
565 587
566 588 response = self.app.get(
567 589 route_path('repo_archivefile',
568 590 repo_name=backend.repo_name,
569 591 fname=fname))
570 592 response.mustcontain('Unknown commit_id')
571 593
572 594
573 595 @pytest.mark.usefixtures("app")
574 596 class TestFilesDiff(object):
575 597
576 598 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
577 599 def test_file_full_diff(self, backend, diff):
578 600 commit1 = backend.repo.get_commit(commit_idx=-1)
579 601 commit2 = backend.repo.get_commit(commit_idx=-2)
580 602
581 603 response = self.app.get(
582 604 route_path('repo_files_diff',
583 605 repo_name=backend.repo_name,
584 606 f_path='README'),
585 607 params={
586 608 'diff1': commit2.raw_id,
587 609 'diff2': commit1.raw_id,
588 610 'fulldiff': '1',
589 611 'diff': diff,
590 612 })
591 613
592 614 if diff == 'diff':
593 615 # use redirect since this is OLD view redirecting to compare page
594 616 response = response.follow()
595 617
596 618 # It's a symlink to README.rst
597 619 response.mustcontain('README.rst')
598 620 response.mustcontain('No newline at end of file')
599 621
600 622 def test_file_binary_diff(self, backend):
601 623 commits = [
602 624 {'message': 'First commit'},
603 625 {'message': 'Commit with binary',
604 626 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
605 627 ]
606 628 repo = backend.create_repo(commits=commits)
607 629
608 630 response = self.app.get(
609 631 route_path('repo_files_diff',
610 632 repo_name=backend.repo_name,
611 633 f_path='file.bin'),
612 634 params={
613 635 'diff1': repo.get_commit(commit_idx=0).raw_id,
614 636 'diff2': repo.get_commit(commit_idx=1).raw_id,
615 637 'fulldiff': '1',
616 638 'diff': 'diff',
617 639 })
618 640 # use redirect since this is OLD view redirecting to compare page
619 641 response = response.follow()
620 642 response.mustcontain('Collapse 1 commit')
621 643 file_changes = (1, 0, 0)
622 644
623 645 compare_page = ComparePage(response)
624 646 compare_page.contains_change_summary(*file_changes)
625 647
626 648 if backend.alias == 'svn':
627 649 response.mustcontain('new file 10644')
628 650 # TODO(marcink): SVN doesn't yet detect binary changes
629 651 else:
630 652 response.mustcontain('new file 100644')
631 653 response.mustcontain('binary diff hidden')
632 654
633 655 def test_diff_2way(self, backend):
634 656 commit1 = backend.repo.get_commit(commit_idx=-1)
635 657 commit2 = backend.repo.get_commit(commit_idx=-2)
636 658 response = self.app.get(
637 659 route_path('repo_files_diff_2way_redirect',
638 660 repo_name=backend.repo_name,
639 661 f_path='README'),
640 662 params={
641 663 'diff1': commit2.raw_id,
642 664 'diff2': commit1.raw_id,
643 665 })
644 666 # use redirect since this is OLD view redirecting to compare page
645 667 response = response.follow()
646 668
647 669 # It's a symlink to README.rst
648 670 response.mustcontain('README.rst')
649 671 response.mustcontain('No newline at end of file')
650 672
651 673 def test_requires_one_commit_id(self, backend, autologin_user):
652 674 response = self.app.get(
653 675 route_path('repo_files_diff',
654 676 repo_name=backend.repo_name,
655 677 f_path='README.rst'),
656 678 status=400)
657 679 response.mustcontain(
658 680 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
659 681
660 682 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
661 683 repo = vcsbackend.repo
662 684 response = self.app.get(
663 685 route_path('repo_files_diff',
664 686 repo_name=repo.name,
665 687 f_path='does-not-exist-in-any-commit'),
666 688 params={
667 689 'diff1': repo[0].raw_id,
668 690 'diff2': repo[1].raw_id
669 691 })
670 692
671 693 response = response.follow()
672 694 response.mustcontain('No files')
673 695
674 696 def test_returns_redirect_if_file_not_changed(self, backend):
675 697 commit = backend.repo.get_commit(commit_idx=-1)
676 698 response = self.app.get(
677 699 route_path('repo_files_diff_2way_redirect',
678 700 repo_name=backend.repo_name,
679 701 f_path='README'),
680 702 params={
681 703 'diff1': commit.raw_id,
682 704 'diff2': commit.raw_id,
683 705 })
684 706
685 707 response = response.follow()
686 708 response.mustcontain('No files')
687 709 response.mustcontain('No commits in this compare')
688 710
689 711 def test_supports_diff_to_different_path_svn(self, backend_svn):
690 712 #TODO: check this case
691 713 return
692 714
693 715 repo = backend_svn['svn-simple-layout'].scm_instance()
694 716 commit_id_1 = '24'
695 717 commit_id_2 = '26'
696 718
697 719 response = self.app.get(
698 720 route_path('repo_files_diff',
699 721 repo_name=backend_svn.repo_name,
700 722 f_path='trunk/example.py'),
701 723 params={
702 724 'diff1': 'tags/v0.2/example.py@' + commit_id_1,
703 725 'diff2': commit_id_2,
704 726 })
705 727
706 728 response = response.follow()
707 729 response.mustcontain(
708 730 # diff contains this
709 731 "Will print out a useful message on invocation.")
710 732
711 733 # Note: Expecting that we indicate the user what's being compared
712 734 response.mustcontain("trunk/example.py")
713 735 response.mustcontain("tags/v0.2/example.py")
714 736
715 737 def test_show_rev_redirects_to_svn_path(self, backend_svn):
716 738 #TODO: check this case
717 739 return
718 740
719 741 repo = backend_svn['svn-simple-layout'].scm_instance()
720 742 commit_id = repo[-1].raw_id
721 743
722 744 response = self.app.get(
723 745 route_path('repo_files_diff',
724 746 repo_name=backend_svn.repo_name,
725 747 f_path='trunk/example.py'),
726 748 params={
727 749 'diff1': 'branches/argparse/example.py@' + commit_id,
728 750 'diff2': commit_id,
729 751 },
730 752 status=302)
731 753 response = response.follow()
732 754 assert response.headers['Location'].endswith(
733 755 'svn-svn-simple-layout/files/26/branches/argparse/example.py')
734 756
735 757 def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
736 758 #TODO: check this case
737 759 return
738 760
739 761 repo = backend_svn['svn-simple-layout'].scm_instance()
740 762 commit_id = repo[-1].raw_id
741 763 response = self.app.get(
742 764 route_path('repo_files_diff',
743 765 repo_name=backend_svn.repo_name,
744 766 f_path='trunk/example.py'),
745 767 params={
746 768 'diff1': 'branches/argparse/example.py@' + commit_id,
747 769 'diff2': commit_id,
748 770 'show_rev': 'Show at Revision',
749 771 'annotate': 'true',
750 772 },
751 773 status=302)
752 774 response = response.follow()
753 775 assert response.headers['Location'].endswith(
754 776 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')
755 777
756 778
757 779 @pytest.mark.usefixtures("app", "autologin_user")
758 780 class TestModifyFilesWithWebInterface(object):
759 781
760 782 def test_add_file_view(self, backend):
761 783 self.app.get(
762 784 route_path('repo_files_add_file',
763 785 repo_name=backend.repo_name,
764 786 commit_id='tip', f_path='/')
765 787 )
766 788
767 789 @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
768 790 def test_add_file_into_repo_missing_content(self, backend, csrf_token):
769 791 backend.create_repo()
770 792 filename = 'init.py'
771 793 response = self.app.post(
772 794 route_path('repo_files_create_file',
773 795 repo_name=backend.repo_name,
774 796 commit_id='tip', f_path='/'),
775 797 params={
776 798 'content': "",
777 799 'filename': filename,
778 800 'csrf_token': csrf_token,
779 801 },
780 802 status=302)
781 803 expected_msg = 'Successfully committed new file `{}`'.format(os.path.join(filename))
782 804 assert_session_flash(response, expected_msg)
783 805
784 806 def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
785 807 commit_id = backend.repo.get_commit().raw_id
786 808 response = self.app.post(
787 809 route_path('repo_files_create_file',
788 810 repo_name=backend.repo_name,
789 811 commit_id=commit_id, f_path='/'),
790 812 params={
791 813 'content': "foo",
792 814 'csrf_token': csrf_token,
793 815 },
794 816 status=302)
795 817
796 818 assert_session_flash(response, 'No filename specified')
797 819
798 820 def test_add_file_into_repo_errors_and_no_commits(
799 821 self, backend, csrf_token):
800 822 repo = backend.create_repo()
801 823 # Create a file with no filename, it will display an error but
802 824 # the repo has no commits yet
803 825 response = self.app.post(
804 826 route_path('repo_files_create_file',
805 827 repo_name=repo.repo_name,
806 828 commit_id='tip', f_path='/'),
807 829 params={
808 830 'content': "foo",
809 831 'csrf_token': csrf_token,
810 832 },
811 833 status=302)
812 834
813 835 assert_session_flash(response, 'No filename specified')
814 836
815 837 # Not allowed, redirect to the summary
816 838 redirected = response.follow()
817 839 summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)
818 840
819 841 # As there are no commits, displays the summary page with the error of
820 842 # creating a file with no filename
821 843
822 844 assert redirected.request.path == summary_url
823 845
824 846 @pytest.mark.parametrize("filename, clean_filename", [
825 847 ('/abs/foo', 'abs/foo'),
826 848 ('../rel/foo', 'rel/foo'),
827 849 ('file/../foo/foo', 'file/foo/foo'),
828 850 ])
829 851 def test_add_file_into_repo_bad_filenames(self, filename, clean_filename, backend, csrf_token):
830 852 repo = backend.create_repo()
831 853 commit_id = repo.get_commit().raw_id
832 854
833 855 response = self.app.post(
834 856 route_path('repo_files_create_file',
835 857 repo_name=repo.repo_name,
836 858 commit_id=commit_id, f_path='/'),
837 859 params={
838 860 'content': "foo",
839 861 'filename': filename,
840 862 'csrf_token': csrf_token,
841 863 },
842 864 status=302)
843 865
844 866 expected_msg = 'Successfully committed new file `{}`'.format(clean_filename)
845 867 assert_session_flash(response, expected_msg)
846 868
847 869 @pytest.mark.parametrize("cnt, filename, content", [
848 870 (1, 'foo.txt', "Content"),
849 871 (2, 'dir/foo.rst', "Content"),
850 872 (3, 'dir/foo-second.rst', "Content"),
851 873 (4, 'rel/dir/foo.bar', "Content"),
852 874 ])
853 875 def test_add_file_into_empty_repo(self, cnt, filename, content, backend, csrf_token):
854 876 repo = backend.create_repo()
855 877 commit_id = repo.get_commit().raw_id
856 878 response = self.app.post(
857 879 route_path('repo_files_create_file',
858 880 repo_name=repo.repo_name,
859 881 commit_id=commit_id, f_path='/'),
860 882 params={
861 883 'content': content,
862 884 'filename': filename,
863 885 'csrf_token': csrf_token,
864 886 },
865 887 status=302)
866 888
867 889 expected_msg = 'Successfully committed new file `{}`'.format(filename)
868 890 assert_session_flash(response, expected_msg)
869 891
870 892 def test_edit_file_view(self, backend):
871 893 response = self.app.get(
872 894 route_path('repo_files_edit_file',
873 895 repo_name=backend.repo_name,
874 896 commit_id=backend.default_head_id,
875 897 f_path='vcs/nodes.py'),
876 898 status=200)
877 899 response.mustcontain("Module holding everything related to vcs nodes.")
878 900
879 901 def test_edit_file_view_not_on_branch(self, backend):
880 902 repo = backend.create_repo()
881 903 backend.ensure_file("vcs/nodes.py")
882 904
883 905 response = self.app.get(
884 906 route_path('repo_files_edit_file',
885 907 repo_name=repo.repo_name,
886 908 commit_id='tip',
887 909 f_path='vcs/nodes.py'),
888 910 status=302)
889 911 assert_session_flash(
890 912 response, 'Cannot modify file. Given commit `tip` is not head of a branch.')
891 913
892 914 def test_edit_file_view_commit_changes(self, backend, csrf_token):
893 915 repo = backend.create_repo()
894 916 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
895 917
896 918 response = self.app.post(
897 919 route_path('repo_files_update_file',
898 920 repo_name=repo.repo_name,
899 921 commit_id=backend.default_head_id,
900 922 f_path='vcs/nodes.py'),
901 923 params={
902 924 'content': "print 'hello world'",
903 925 'message': 'I committed',
904 926 'filename': "vcs/nodes.py",
905 927 'csrf_token': csrf_token,
906 928 },
907 929 status=302)
908 930 assert_session_flash(
909 931 response, 'Successfully committed changes to file `vcs/nodes.py`')
910 932 tip = repo.get_commit(commit_idx=-1)
911 933 assert tip.message == 'I committed'
912 934
913 935 def test_edit_file_view_commit_changes_default_message(self, backend,
914 936 csrf_token):
915 937 repo = backend.create_repo()
916 938 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
917 939
918 940 commit_id = (
919 941 backend.default_branch_name or
920 942 backend.repo.scm_instance().commit_ids[-1])
921 943
922 944 response = self.app.post(
923 945 route_path('repo_files_update_file',
924 946 repo_name=repo.repo_name,
925 947 commit_id=commit_id,
926 948 f_path='vcs/nodes.py'),
927 949 params={
928 950 'content': "print 'hello world'",
929 951 'message': '',
930 952 'filename': "vcs/nodes.py",
931 953 'csrf_token': csrf_token,
932 954 },
933 955 status=302)
934 956 assert_session_flash(
935 957 response, 'Successfully committed changes to file `vcs/nodes.py`')
936 958 tip = repo.get_commit(commit_idx=-1)
937 959 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
938 960
939 961 def test_delete_file_view(self, backend):
940 962 self.app.get(
941 963 route_path('repo_files_remove_file',
942 964 repo_name=backend.repo_name,
943 965 commit_id=backend.default_head_id,
944 966 f_path='vcs/nodes.py'),
945 967 status=200)
946 968
947 969 def test_delete_file_view_not_on_branch(self, backend):
948 970 repo = backend.create_repo()
949 971 backend.ensure_file('vcs/nodes.py')
950 972
951 973 response = self.app.get(
952 974 route_path('repo_files_remove_file',
953 975 repo_name=repo.repo_name,
954 976 commit_id='tip',
955 977 f_path='vcs/nodes.py'),
956 978 status=302)
957 979 assert_session_flash(
958 980 response, 'Cannot modify file. Given commit `tip` is not head of a branch.')
959 981
960 982 def test_delete_file_view_commit_changes(self, backend, csrf_token):
961 983 repo = backend.create_repo()
962 984 backend.ensure_file("vcs/nodes.py")
963 985
964 986 response = self.app.post(
965 987 route_path('repo_files_delete_file',
966 988 repo_name=repo.repo_name,
967 989 commit_id=backend.default_head_id,
968 990 f_path='vcs/nodes.py'),
969 991 params={
970 992 'message': 'i commited',
971 993 'csrf_token': csrf_token,
972 994 },
973 995 status=302)
974 996 assert_session_flash(
975 997 response, 'Successfully deleted file `vcs/nodes.py`')
976 998
977 999
978 1000 @pytest.mark.usefixtures("app")
979 1001 class TestFilesViewOtherCases(object):
980 1002
981 1003 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
982 1004 self, backend_stub, autologin_regular_user, user_regular,
983 1005 user_util):
984 1006
985 1007 repo = backend_stub.create_repo()
986 1008 user_util.grant_user_permission_to_repo(
987 1009 repo, user_regular, 'repository.write')
988 1010 response = self.app.get(
989 1011 route_path('repo_files',
990 1012 repo_name=repo.repo_name,
991 1013 commit_id='tip', f_path='/'))
992 1014
993 1015 repo_file_add_url = route_path(
994 1016 'repo_files_add_file',
995 1017 repo_name=repo.repo_name,
996 1018 commit_id=0, f_path='')
997 1019
998 1020 assert_session_flash(
999 1021 response,
1000 1022 'There are no files yet. <a class="alert-link" '
1001 1023 'href="{}">Click here to add a new file.</a>'
1002 1024 .format(repo_file_add_url))
1003 1025
1004 1026 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
1005 1027 self, backend_stub, autologin_regular_user):
1006 1028 repo = backend_stub.create_repo()
1007 1029 # init session for anon user
1008 1030 route_path('repo_summary', repo_name=repo.repo_name)
1009 1031
1010 1032 repo_file_add_url = route_path(
1011 1033 'repo_files_add_file',
1012 1034 repo_name=repo.repo_name,
1013 1035 commit_id=0, f_path='')
1014 1036
1015 1037 response = self.app.get(
1016 1038 route_path('repo_files',
1017 1039 repo_name=repo.repo_name,
1018 1040 commit_id='tip', f_path='/'))
1019 1041
1020 1042 assert_session_flash(response, no_=repo_file_add_url)
1021 1043
1022 1044 @pytest.mark.parametrize('file_node', [
1023 1045 'archive/file.zip',
1024 1046 'diff/my-file.txt',
1025 1047 'render.py',
1026 1048 'render',
1027 1049 'remove_file',
1028 1050 'remove_file/to-delete.txt',
1029 1051 ])
1030 1052 def test_file_names_equal_to_routes_parts(self, backend, file_node):
1031 1053 backend.create_repo()
1032 1054 backend.ensure_file(file_node)
1033 1055
1034 1056 self.app.get(
1035 1057 route_path('repo_files',
1036 1058 repo_name=backend.repo_name,
1037 1059 commit_id='tip', f_path=file_node),
1038 1060 status=200)
1039 1061
1040 1062
1041 1063 class TestAdjustFilePathForSvn(object):
1042 1064 """
1043 1065 SVN specific adjustments of node history in RepoFilesView.
1044 1066 """
1045 1067
1046 1068 def test_returns_path_relative_to_matched_reference(self):
1047 1069 repo = self._repo(branches=['trunk'])
1048 1070 self.assert_file_adjustment('trunk/file', 'file', repo)
1049 1071
1050 1072 def test_does_not_modify_file_if_no_reference_matches(self):
1051 1073 repo = self._repo(branches=['trunk'])
1052 1074 self.assert_file_adjustment('notes/file', 'notes/file', repo)
1053 1075
1054 1076 def test_does_not_adjust_partial_directory_names(self):
1055 1077 repo = self._repo(branches=['trun'])
1056 1078 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
1057 1079
1058 1080 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
1059 1081 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
1060 1082 self.assert_file_adjustment('trunk/new/file', 'file', repo)
1061 1083
1062 1084 def assert_file_adjustment(self, f_path, expected, repo):
1063 1085 result = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
1064 1086 assert result == expected
1065 1087
1066 1088 def _repo(self, branches=None):
1067 1089 repo = mock.Mock()
1068 1090 repo.branches = OrderedDict((name, '0') for name in branches or [])
1069 1091 repo.tags = {}
1070 1092 return repo
@@ -1,358 +1,358 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import logging
23 23
24 24 from pyramid.httpexceptions import HTTPNotFound, HTTPFound
25 25
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import RepoAppView
30 30 import rhodecode.lib.helpers as h
31 31 from rhodecode.lib.auth import (
32 32 LoginRequired, HasRepoPermissionAnyDecorator)
33 33
34 34 from rhodecode.lib.ext_json import json
35 35 from rhodecode.lib.graphmod import _colored, _dagwalker
36 36 from rhodecode.lib.helpers import RepoPage
37 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool
37 from rhodecode.lib.utils2 import safe_int, safe_str, str2bool, safe_unicode
38 38 from rhodecode.lib.vcs.exceptions import (
39 39 RepositoryError, CommitDoesNotExistError,
40 40 CommitError, NodeDoesNotExistError, EmptyRepositoryError)
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44 DEFAULT_CHANGELOG_SIZE = 20
45 45
46 46
47 47 class RepoChangelogView(RepoAppView):
48 48
49 49 def _get_commit_or_redirect(self, commit_id, redirect_after=True):
50 50 """
51 51 This is a safe way to get commit. If an error occurs it redirects to
52 52 tip with proper message
53 53
54 54 :param commit_id: id of commit to fetch
55 55 :param redirect_after: toggle redirection
56 56 """
57 57 _ = self.request.translate
58 58
59 59 try:
60 60 return self.rhodecode_vcs_repo.get_commit(commit_id)
61 61 except EmptyRepositoryError:
62 62 if not redirect_after:
63 63 return None
64 64
65 65 h.flash(h.literal(
66 66 _('There are no commits yet')), category='warning')
67 67 raise HTTPFound(
68 68 h.route_path('repo_summary', repo_name=self.db_repo_name))
69 69
70 70 except (CommitDoesNotExistError, LookupError):
71 71 msg = _('No such commit exists for this repository')
72 72 h.flash(msg, category='error')
73 73 raise HTTPNotFound()
74 74 except RepositoryError as e:
75 75 h.flash(safe_str(h.escape(e)), category='error')
76 76 raise HTTPNotFound()
77 77
78 78 def _graph(self, repo, commits, prev_data=None, next_data=None):
79 79 """
80 80 Generates a DAG graph for repo
81 81
82 82 :param repo: repo instance
83 83 :param commits: list of commits
84 84 """
85 85 if not commits:
86 86 return json.dumps([]), json.dumps([])
87 87
88 88 def serialize(commit, parents=True):
89 89 data = dict(
90 90 raw_id=commit.raw_id,
91 91 idx=commit.idx,
92 92 branch=None,
93 93 )
94 94 if parents:
95 95 data['parents'] = [
96 96 serialize(x, parents=False) for x in commit.parents]
97 97 return data
98 98
99 99 prev_data = prev_data or []
100 100 next_data = next_data or []
101 101
102 102 current = [serialize(x) for x in commits]
103 103 commits = prev_data + current + next_data
104 104
105 105 dag = _dagwalker(repo, commits)
106 106
107 107 data = [[commit_id, vtx, edges, branch]
108 108 for commit_id, vtx, edges, branch in _colored(dag)]
109 109 return json.dumps(data), json.dumps(current)
110 110
111 111 def _check_if_valid_branch(self, branch_name, repo_name, f_path):
112 112 if branch_name not in self.rhodecode_vcs_repo.branches_all:
113 h.flash('Branch {} is not found.'.format(h.escape(branch_name)),
113 h.flash(u'Branch {} is not found.'.format(h.escape(safe_unicode(branch_name))),
114 114 category='warning')
115 115 redirect_url = h.route_path(
116 116 'repo_commits_file', repo_name=repo_name,
117 117 commit_id=branch_name, f_path=f_path or '')
118 118 raise HTTPFound(redirect_url)
119 119
120 120 def _load_changelog_data(
121 121 self, c, collection, page, chunk_size, branch_name=None,
122 122 dynamic=False, f_path=None, commit_id=None):
123 123
124 124 def url_generator(page_num):
125 125 query_params = {
126 126 'page': page_num
127 127 }
128 128
129 129 if branch_name:
130 130 query_params.update({
131 131 'branch': branch_name
132 132 })
133 133
134 134 if f_path:
135 135 # changelog for file
136 136 return h.route_path(
137 137 'repo_commits_file',
138 138 repo_name=c.rhodecode_db_repo.repo_name,
139 139 commit_id=commit_id, f_path=f_path,
140 140 _query=query_params)
141 141 else:
142 142 return h.route_path(
143 143 'repo_commits',
144 144 repo_name=c.rhodecode_db_repo.repo_name, _query=query_params)
145 145
146 146 c.total_cs = len(collection)
147 147 c.showing_commits = min(chunk_size, c.total_cs)
148 148 c.pagination = RepoPage(collection, page=page, item_count=c.total_cs,
149 149 items_per_page=chunk_size, url_maker=url_generator)
150 150
151 151 c.next_page = c.pagination.next_page
152 152 c.prev_page = c.pagination.previous_page
153 153
154 154 if dynamic:
155 155 if self.request.GET.get('chunk') != 'next':
156 156 c.next_page = None
157 157 if self.request.GET.get('chunk') != 'prev':
158 158 c.prev_page = None
159 159
160 160 page_commit_ids = [x.raw_id for x in c.pagination]
161 161 c.comments = c.rhodecode_db_repo.get_comments(page_commit_ids)
162 162 c.statuses = c.rhodecode_db_repo.statuses(page_commit_ids)
163 163
164 164 def load_default_context(self):
165 165 c = self._get_local_tmpl_context(include_app_defaults=True)
166 166
167 167 c.rhodecode_repo = self.rhodecode_vcs_repo
168 168
169 169 return c
170 170
171 171 def _get_preload_attrs(self):
172 172 pre_load = ['author', 'branch', 'date', 'message', 'parents',
173 173 'obsolete', 'phase', 'hidden']
174 174 return pre_load
175 175
176 176 @LoginRequired()
177 177 @HasRepoPermissionAnyDecorator(
178 178 'repository.read', 'repository.write', 'repository.admin')
179 179 def repo_changelog(self):
180 180 c = self.load_default_context()
181 181
182 182 commit_id = self.request.matchdict.get('commit_id')
183 183 f_path = self._get_f_path(self.request.matchdict)
184 184 show_hidden = str2bool(self.request.GET.get('evolve'))
185 185
186 186 chunk_size = 20
187 187
188 188 c.branch_name = branch_name = self.request.GET.get('branch') or ''
189 189 c.book_name = book_name = self.request.GET.get('bookmark') or ''
190 190 c.f_path = f_path
191 191 c.commit_id = commit_id
192 192 c.show_hidden = show_hidden
193 193
194 194 hist_limit = safe_int(self.request.GET.get('limit')) or None
195 195
196 196 p = safe_int(self.request.GET.get('page', 1), 1)
197 197
198 198 c.selected_name = branch_name or book_name
199 199 if not commit_id and branch_name:
200 200 self._check_if_valid_branch(branch_name, self.db_repo_name, f_path)
201 201
202 202 c.changelog_for_path = f_path
203 203 pre_load = self._get_preload_attrs()
204 204
205 205 partial_xhr = self.request.environ.get('HTTP_X_PARTIAL_XHR')
206 206
207 207 try:
208 208 if f_path:
209 209 log.debug('generating changelog for path %s', f_path)
210 210 # get the history for the file !
211 211 base_commit = self.rhodecode_vcs_repo.get_commit(commit_id)
212 212
213 213 try:
214 214 collection = base_commit.get_path_history(
215 215 f_path, limit=hist_limit, pre_load=pre_load)
216 216 if collection and partial_xhr:
217 217 # for ajax call we remove first one since we're looking
218 218 # at it right now in the context of a file commit
219 219 collection.pop(0)
220 220 except (NodeDoesNotExistError, CommitError):
221 221 # this node is not present at tip!
222 222 try:
223 223 commit = self._get_commit_or_redirect(commit_id)
224 224 collection = commit.get_path_history(f_path)
225 225 except RepositoryError as e:
226 226 h.flash(safe_str(e), category='warning')
227 227 redirect_url = h.route_path(
228 228 'repo_commits', repo_name=self.db_repo_name)
229 229 raise HTTPFound(redirect_url)
230 230 collection = list(reversed(collection))
231 231 else:
232 232 collection = self.rhodecode_vcs_repo.get_commits(
233 233 branch_name=branch_name, show_hidden=show_hidden,
234 234 pre_load=pre_load, translate_tags=False)
235 235
236 236 self._load_changelog_data(
237 237 c, collection, p, chunk_size, c.branch_name,
238 238 f_path=f_path, commit_id=commit_id)
239 239
240 240 except EmptyRepositoryError as e:
241 241 h.flash(safe_str(h.escape(e)), category='warning')
242 242 raise HTTPFound(
243 243 h.route_path('repo_summary', repo_name=self.db_repo_name))
244 244 except HTTPFound:
245 245 raise
246 246 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
247 247 log.exception(safe_str(e))
248 248 h.flash(safe_str(h.escape(e)), category='error')
249 249
250 250 if commit_id:
251 251 # from single commit page, we redirect to main commits
252 252 raise HTTPFound(
253 253 h.route_path('repo_commits', repo_name=self.db_repo_name))
254 254 else:
255 255 # otherwise we redirect to summary
256 256 raise HTTPFound(
257 257 h.route_path('repo_summary', repo_name=self.db_repo_name))
258 258
259 259 if partial_xhr or self.request.environ.get('HTTP_X_PJAX'):
260 260 # case when loading dynamic file history in file view
261 261 # loading from ajax, we don't want the first result, it's popped
262 262 # in the code above
263 263 html = render(
264 264 'rhodecode:templates/commits/changelog_file_history.mako',
265 265 self._get_template_context(c), self.request)
266 266 return Response(html)
267 267
268 268 commit_ids = []
269 269 if not f_path:
270 270 # only load graph data when not in file history mode
271 271 commit_ids = c.pagination
272 272
273 273 c.graph_data, c.graph_commits = self._graph(
274 274 self.rhodecode_vcs_repo, commit_ids)
275 275
276 276 return self._get_template_context(c)
277 277
278 278 @LoginRequired()
279 279 @HasRepoPermissionAnyDecorator(
280 280 'repository.read', 'repository.write', 'repository.admin')
281 281 def repo_commits_elements(self):
282 282 c = self.load_default_context()
283 283 commit_id = self.request.matchdict.get('commit_id')
284 284 f_path = self._get_f_path(self.request.matchdict)
285 285 show_hidden = str2bool(self.request.GET.get('evolve'))
286 286
287 287 chunk_size = 20
288 288 hist_limit = safe_int(self.request.GET.get('limit')) or None
289 289
290 290 def wrap_for_error(err):
291 291 html = '<tr>' \
292 292 '<td colspan="9" class="alert alert-error">ERROR: {}</td>' \
293 293 '</tr>'.format(err)
294 294 return Response(html)
295 295
296 296 c.branch_name = branch_name = self.request.GET.get('branch') or ''
297 297 c.book_name = book_name = self.request.GET.get('bookmark') or ''
298 298 c.f_path = f_path
299 299 c.commit_id = commit_id
300 300 c.show_hidden = show_hidden
301 301
302 302 c.selected_name = branch_name or book_name
303 303 if branch_name and branch_name not in self.rhodecode_vcs_repo.branches_all:
304 304 return wrap_for_error(
305 305 safe_str('Branch: {} is not valid'.format(branch_name)))
306 306
307 307 pre_load = self._get_preload_attrs()
308 308
309 309 if f_path:
310 310 try:
311 311 base_commit = self.rhodecode_vcs_repo.get_commit(commit_id)
312 312 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
313 313 log.exception(safe_str(e))
314 314 raise HTTPFound(
315 315 h.route_path('repo_commits', repo_name=self.db_repo_name))
316 316
317 317 collection = base_commit.get_path_history(
318 318 f_path, limit=hist_limit, pre_load=pre_load)
319 319 collection = list(reversed(collection))
320 320 else:
321 321 collection = self.rhodecode_vcs_repo.get_commits(
322 322 branch_name=branch_name, show_hidden=show_hidden, pre_load=pre_load,
323 323 translate_tags=False)
324 324
325 325 p = safe_int(self.request.GET.get('page', 1), 1)
326 326 try:
327 327 self._load_changelog_data(
328 328 c, collection, p, chunk_size, dynamic=True,
329 329 f_path=f_path, commit_id=commit_id)
330 330 except EmptyRepositoryError as e:
331 331 return wrap_for_error(safe_str(e))
332 332 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
333 333 log.exception('Failed to fetch commits')
334 334 return wrap_for_error(safe_str(e))
335 335
336 336 prev_data = None
337 337 next_data = None
338 338
339 339 try:
340 340 prev_graph = json.loads(self.request.POST.get('graph') or '{}')
341 341 except json.JSONDecodeError:
342 342 prev_graph = {}
343 343
344 344 if self.request.GET.get('chunk') == 'prev':
345 345 next_data = prev_graph
346 346 elif self.request.GET.get('chunk') == 'next':
347 347 prev_data = prev_graph
348 348
349 349 commit_ids = []
350 350 if not f_path:
351 351 # only load graph data when not in file history mode
352 352 commit_ids = c.pagination
353 353
354 354 c.graph_data, c.graph_commits = self._graph(
355 355 self.rhodecode_vcs_repo, commit_ids,
356 356 prev_data=prev_data, next_data=next_data)
357 357
358 358 return self._get_template_context(c)
@@ -1,809 +1,813 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 from pyramid.httpexceptions import (
25 25 HTTPNotFound, HTTPBadRequest, HTTPFound, HTTPForbidden, HTTPConflict)
26 26 from pyramid.renderers import render
27 27 from pyramid.response import Response
28 28
29 29 from rhodecode.apps._base import RepoAppView
30 30 from rhodecode.apps.file_store import utils as store_utils
31 31 from rhodecode.apps.file_store.exceptions import FileNotAllowedException, FileOverSizeException
32 32
33 33 from rhodecode.lib import diffs, codeblocks, channelstream
34 34 from rhodecode.lib.auth import (
35 35 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous, CSRFRequired)
36 36 from rhodecode.lib.ext_json import json
37 37 from rhodecode.lib.compat import OrderedDict
38 38 from rhodecode.lib.diffs import (
39 39 cache_diff, load_cached_diff, diff_cache_exist, get_diff_context,
40 40 get_diff_whitespace_flag)
41 41 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError, CommentVersionMismatch
42 42 import rhodecode.lib.helpers as h
43 43 from rhodecode.lib.utils2 import safe_unicode, str2bool, StrictAttributeDict, safe_str
44 44 from rhodecode.lib.vcs.backends.base import EmptyCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 RepositoryError, CommitDoesNotExistError)
47 47 from rhodecode.model.db import ChangesetComment, ChangesetStatus, FileStore, \
48 48 ChangesetCommentHistory
49 49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 50 from rhodecode.model.comment import CommentsModel
51 51 from rhodecode.model.meta import Session
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56
57 57 def _update_with_GET(params, request):
58 58 for k in ['diff1', 'diff2', 'diff']:
59 59 params[k] += request.GET.getall(k)
60 60
61 61
62 62 class RepoCommitsView(RepoAppView):
63 63 def load_default_context(self):
64 64 c = self._get_local_tmpl_context(include_app_defaults=True)
65 65 c.rhodecode_repo = self.rhodecode_vcs_repo
66 66
67 67 return c
68 68
69 69 def _is_diff_cache_enabled(self, target_repo):
70 70 caching_enabled = self._get_general_setting(
71 71 target_repo, 'rhodecode_diff_cache')
72 72 log.debug('Diff caching enabled: %s', caching_enabled)
73 73 return caching_enabled
74 74
75 75 def _commit(self, commit_id_range, method):
76 76 _ = self.request.translate
77 77 c = self.load_default_context()
78 78 c.fulldiff = self.request.GET.get('fulldiff')
79 79 redirect_to_combined = str2bool(self.request.GET.get('redirect_combined'))
80 80
81 81 # fetch global flags of ignore ws or context lines
82 82 diff_context = get_diff_context(self.request)
83 83 hide_whitespace_changes = get_diff_whitespace_flag(self.request)
84 84
85 85 # diff_limit will cut off the whole diff if the limit is applied
86 86 # otherwise it will just hide the big files from the front-end
87 87 diff_limit = c.visual.cut_off_limit_diff
88 88 file_limit = c.visual.cut_off_limit_file
89 89
90 90 # get ranges of commit ids if preset
91 91 commit_range = commit_id_range.split('...')[:2]
92 92
93 93 try:
94 94 pre_load = ['affected_files', 'author', 'branch', 'date',
95 95 'message', 'parents']
96 96 if self.rhodecode_vcs_repo.alias == 'hg':
97 97 pre_load += ['hidden', 'obsolete', 'phase']
98 98
99 99 if len(commit_range) == 2:
100 100 commits = self.rhodecode_vcs_repo.get_commits(
101 101 start_id=commit_range[0], end_id=commit_range[1],
102 102 pre_load=pre_load, translate_tags=False)
103 103 commits = list(commits)
104 104 else:
105 105 commits = [self.rhodecode_vcs_repo.get_commit(
106 106 commit_id=commit_id_range, pre_load=pre_load)]
107 107
108 108 c.commit_ranges = commits
109 109 if not c.commit_ranges:
110 110 raise RepositoryError('The commit range returned an empty result')
111 111 except CommitDoesNotExistError as e:
112 112 msg = _('No such commit exists. Org exception: `{}`').format(safe_str(e))
113 113 h.flash(msg, category='error')
114 114 raise HTTPNotFound()
115 115 except Exception:
116 116 log.exception("General failure")
117 117 raise HTTPNotFound()
118 118 single_commit = len(c.commit_ranges) == 1
119 119
120 120 if redirect_to_combined and not single_commit:
121 121 source_ref = getattr(c.commit_ranges[0].parents[0]
122 122 if c.commit_ranges[0].parents else h.EmptyCommit(), 'raw_id')
123 123 target_ref = c.commit_ranges[-1].raw_id
124 124 next_url = h.route_path(
125 125 'repo_compare',
126 126 repo_name=c.repo_name,
127 127 source_ref_type='rev',
128 128 source_ref=source_ref,
129 129 target_ref_type='rev',
130 130 target_ref=target_ref)
131 131 raise HTTPFound(next_url)
132 132
133 133 c.changes = OrderedDict()
134 134 c.lines_added = 0
135 135 c.lines_deleted = 0
136 136
137 137 # auto collapse if we have more than limit
138 138 collapse_limit = diffs.DiffProcessor._collapse_commits_over
139 139 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
140 140
141 141 c.commit_statuses = ChangesetStatus.STATUSES
142 142 c.inline_comments = []
143 143 c.files = []
144 144
145 145 c.comments = []
146 146 c.unresolved_comments = []
147 147 c.resolved_comments = []
148 148
149 149 # Single commit
150 150 if single_commit:
151 151 commit = c.commit_ranges[0]
152 152 c.comments = CommentsModel().get_comments(
153 153 self.db_repo.repo_id,
154 154 revision=commit.raw_id)
155 155
156 156 # comments from PR
157 157 statuses = ChangesetStatusModel().get_statuses(
158 158 self.db_repo.repo_id, commit.raw_id,
159 159 with_revisions=True)
160 160
161 161 prs = set()
162 162 reviewers = list()
163 163 reviewers_duplicates = set() # to not have duplicates from multiple votes
164 164 for c_status in statuses:
165 165
166 166 # extract associated pull-requests from votes
167 167 if c_status.pull_request:
168 168 prs.add(c_status.pull_request)
169 169
170 170 # extract reviewers
171 171 _user_id = c_status.author.user_id
172 172 if _user_id not in reviewers_duplicates:
173 173 reviewers.append(
174 174 StrictAttributeDict({
175 175 'user': c_status.author,
176 176
177 177 # fake attributed for commit, page that we don't have
178 178 # but we share the display with PR page
179 179 'mandatory': False,
180 180 'reasons': [],
181 181 'rule_user_group_data': lambda: None
182 182 })
183 183 )
184 184 reviewers_duplicates.add(_user_id)
185 185
186 186 c.reviewers_count = len(reviewers)
187 187 c.observers_count = 0
188 188
189 189 # from associated statuses, check the pull requests, and
190 190 # show comments from them
191 191 for pr in prs:
192 192 c.comments.extend(pr.comments)
193 193
194 194 c.unresolved_comments = CommentsModel()\
195 195 .get_commit_unresolved_todos(commit.raw_id)
196 196 c.resolved_comments = CommentsModel()\
197 197 .get_commit_resolved_todos(commit.raw_id)
198 198
199 199 c.inline_comments_flat = CommentsModel()\
200 200 .get_commit_inline_comments(commit.raw_id)
201 201
202 202 review_statuses = ChangesetStatusModel().aggregate_votes_by_user(
203 203 statuses, reviewers)
204 204
205 205 c.commit_review_status = ChangesetStatus.STATUS_NOT_REVIEWED
206 206
207 207 c.commit_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
208 208
209 209 for review_obj, member, reasons, mandatory, status in review_statuses:
210 210 member_reviewer = h.reviewer_as_json(
211 211 member, reasons=reasons, mandatory=mandatory, role=None,
212 212 user_group=None
213 213 )
214 214
215 215 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
216 216 member_reviewer['review_status'] = current_review_status
217 217 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
218 218 member_reviewer['allowed_to_update'] = False
219 219 c.commit_set_reviewers_data_json['reviewers'].append(member_reviewer)
220 220
221 221 c.commit_set_reviewers_data_json = json.dumps(c.commit_set_reviewers_data_json)
222 222
223 223 # NOTE(marcink): this uses the same voting logic as in pull-requests
224 224 c.commit_review_status = ChangesetStatusModel().calculate_status(review_statuses)
225 225 c.commit_broadcast_channel = channelstream.comment_channel(c.repo_name, commit_obj=commit)
226 226
227 227 diff = None
228 228 # Iterate over ranges (default commit view is always one commit)
229 229 for commit in c.commit_ranges:
230 230 c.changes[commit.raw_id] = []
231 231
232 232 commit2 = commit
233 233 commit1 = commit.first_parent
234 234
235 235 if method == 'show':
236 236 inline_comments = CommentsModel().get_inline_comments(
237 237 self.db_repo.repo_id, revision=commit.raw_id)
238 238 c.inline_cnt = len(CommentsModel().get_inline_comments_as_list(
239 239 inline_comments))
240 240 c.inline_comments = inline_comments
241 241
242 242 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(
243 243 self.db_repo)
244 244 cache_file_path = diff_cache_exist(
245 245 cache_path, 'diff', commit.raw_id,
246 246 hide_whitespace_changes, diff_context, c.fulldiff)
247 247
248 248 caching_enabled = self._is_diff_cache_enabled(self.db_repo)
249 249 force_recache = str2bool(self.request.GET.get('force_recache'))
250 250
251 251 cached_diff = None
252 252 if caching_enabled:
253 253 cached_diff = load_cached_diff(cache_file_path)
254 254
255 255 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
256 256 if not force_recache and has_proper_diff_cache:
257 257 diffset = cached_diff['diff']
258 258 else:
259 259 vcs_diff = self.rhodecode_vcs_repo.get_diff(
260 260 commit1, commit2,
261 261 ignore_whitespace=hide_whitespace_changes,
262 262 context=diff_context)
263 263
264 264 diff_processor = diffs.DiffProcessor(
265 265 vcs_diff, format='newdiff', diff_limit=diff_limit,
266 266 file_limit=file_limit, show_full_diff=c.fulldiff)
267 267
268 268 _parsed = diff_processor.prepare()
269 269
270 270 diffset = codeblocks.DiffSet(
271 271 repo_name=self.db_repo_name,
272 272 source_node_getter=codeblocks.diffset_node_getter(commit1),
273 273 target_node_getter=codeblocks.diffset_node_getter(commit2))
274 274
275 275 diffset = self.path_filter.render_patchset_filtered(
276 276 diffset, _parsed, commit1.raw_id, commit2.raw_id)
277 277
278 278 # save cached diff
279 279 if caching_enabled:
280 280 cache_diff(cache_file_path, diffset, None)
281 281
282 282 c.limited_diff = diffset.limited_diff
283 283 c.changes[commit.raw_id] = diffset
284 284 else:
285 285 # TODO(marcink): no cache usage here...
286 286 _diff = self.rhodecode_vcs_repo.get_diff(
287 287 commit1, commit2,
288 288 ignore_whitespace=hide_whitespace_changes, context=diff_context)
289 289 diff_processor = diffs.DiffProcessor(
290 290 _diff, format='newdiff', diff_limit=diff_limit,
291 291 file_limit=file_limit, show_full_diff=c.fulldiff)
292 292 # downloads/raw we only need RAW diff nothing else
293 293 diff = self.path_filter.get_raw_patch(diff_processor)
294 294 c.changes[commit.raw_id] = [None, None, None, None, diff, None, None]
295 295
296 296 # sort comments by how they were generated
297 297 c.comments = sorted(c.comments, key=lambda x: x.comment_id)
298 298 c.at_version_num = None
299 299
300 300 if len(c.commit_ranges) == 1:
301 301 c.commit = c.commit_ranges[0]
302 302 c.parent_tmpl = ''.join(
303 303 '# Parent %s\n' % x.raw_id for x in c.commit.parents)
304 304
305 305 if method == 'download':
306 306 response = Response(diff)
307 307 response.content_type = 'text/plain'
308 308 response.content_disposition = (
309 309 'attachment; filename=%s.diff' % commit_id_range[:12])
310 310 return response
311 311 elif method == 'patch':
312 312 c.diff = safe_unicode(diff)
313 313 patch = render(
314 314 'rhodecode:templates/changeset/patch_changeset.mako',
315 315 self._get_template_context(c), self.request)
316 316 response = Response(patch)
317 317 response.content_type = 'text/plain'
318 318 return response
319 319 elif method == 'raw':
320 320 response = Response(diff)
321 321 response.content_type = 'text/plain'
322 322 return response
323 323 elif method == 'show':
324 324 if len(c.commit_ranges) == 1:
325 325 html = render(
326 326 'rhodecode:templates/changeset/changeset.mako',
327 327 self._get_template_context(c), self.request)
328 328 return Response(html)
329 329 else:
330 330 c.ancestor = None
331 331 c.target_repo = self.db_repo
332 332 html = render(
333 333 'rhodecode:templates/changeset/changeset_range.mako',
334 334 self._get_template_context(c), self.request)
335 335 return Response(html)
336 336
337 337 raise HTTPBadRequest()
338 338
339 339 @LoginRequired()
340 340 @HasRepoPermissionAnyDecorator(
341 341 'repository.read', 'repository.write', 'repository.admin')
342 342 def repo_commit_show(self):
343 343 commit_id = self.request.matchdict['commit_id']
344 344 return self._commit(commit_id, method='show')
345 345
346 346 @LoginRequired()
347 347 @HasRepoPermissionAnyDecorator(
348 348 'repository.read', 'repository.write', 'repository.admin')
349 349 def repo_commit_raw(self):
350 350 commit_id = self.request.matchdict['commit_id']
351 351 return self._commit(commit_id, method='raw')
352 352
353 353 @LoginRequired()
354 354 @HasRepoPermissionAnyDecorator(
355 355 'repository.read', 'repository.write', 'repository.admin')
356 356 def repo_commit_patch(self):
357 357 commit_id = self.request.matchdict['commit_id']
358 358 return self._commit(commit_id, method='patch')
359 359
360 360 @LoginRequired()
361 361 @HasRepoPermissionAnyDecorator(
362 362 'repository.read', 'repository.write', 'repository.admin')
363 363 def repo_commit_download(self):
364 364 commit_id = self.request.matchdict['commit_id']
365 365 return self._commit(commit_id, method='download')
366 366
367 367 def _commit_comments_create(self, commit_id, comments):
368 368 _ = self.request.translate
369 369 data = {}
370 370 if not comments:
371 371 return
372 372
373 373 commit = self.db_repo.get_commit(commit_id)
374 374
375 375 all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)
376 376 for entry in comments:
377 377 c = self.load_default_context()
378 378 comment_type = entry['comment_type']
379 379 text = entry['text']
380 380 status = entry['status']
381 381 is_draft = str2bool(entry['is_draft'])
382 382 resolves_comment_id = entry['resolves_comment_id']
383 383 f_path = entry['f_path']
384 384 line_no = entry['line']
385 385 target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))
386 386
387 387 if status:
388 388 text = text or (_('Status change %(transition_icon)s %(status)s')
389 389 % {'transition_icon': '>',
390 390 'status': ChangesetStatus.get_status_lbl(status)})
391 391
392 392 comment = CommentsModel().create(
393 393 text=text,
394 394 repo=self.db_repo.repo_id,
395 395 user=self._rhodecode_db_user.user_id,
396 396 commit_id=commit_id,
397 397 f_path=f_path,
398 398 line_no=line_no,
399 399 status_change=(ChangesetStatus.get_status_lbl(status)
400 400 if status else None),
401 401 status_change_type=status,
402 402 comment_type=comment_type,
403 403 is_draft=is_draft,
404 404 resolves_comment_id=resolves_comment_id,
405 405 auth_user=self._rhodecode_user,
406 406 send_email=not is_draft, # skip notification for draft comments
407 407 )
408 408 is_inline = comment.is_inline
409 409
410 410 # get status if set !
411 411 if status:
412 412 # `dont_allow_on_closed_pull_request = True` means
413 413 # if latest status was from pull request and it's closed
414 414 # disallow changing status !
415 415
416 416 try:
417 417 ChangesetStatusModel().set_status(
418 418 self.db_repo.repo_id,
419 419 status,
420 420 self._rhodecode_db_user.user_id,
421 421 comment,
422 422 revision=commit_id,
423 423 dont_allow_on_closed_pull_request=True
424 424 )
425 425 except StatusChangeOnClosedPullRequestError:
426 426 msg = _('Changing the status of a commit associated with '
427 427 'a closed pull request is not allowed')
428 428 log.exception(msg)
429 429 h.flash(msg, category='warning')
430 430 raise HTTPFound(h.route_path(
431 431 'repo_commit', repo_name=self.db_repo_name,
432 432 commit_id=commit_id))
433 433
434 434 Session().flush()
435 435 # this is somehow required to get access to some relationship
436 436 # loaded on comment
437 437 Session().refresh(comment)
438 438
439 439 # skip notifications for drafts
440 440 if not is_draft:
441 441 CommentsModel().trigger_commit_comment_hook(
442 442 self.db_repo, self._rhodecode_user, 'create',
443 443 data={'comment': comment, 'commit': commit})
444 444
445 445 comment_id = comment.comment_id
446 446 data[comment_id] = {
447 447 'target_id': target_elem_id
448 448 }
449 449 Session().flush()
450 450
451 451 c.co = comment
452 452 c.at_version_num = 0
453 453 c.is_new = True
454 454 rendered_comment = render(
455 455 'rhodecode:templates/changeset/changeset_comment_block.mako',
456 456 self._get_template_context(c), self.request)
457 457
458 458 data[comment_id].update(comment.get_dict())
459 459 data[comment_id].update({'rendered_text': rendered_comment})
460 460
461 461 # finalize, commit and redirect
462 462 Session().commit()
463 463
464 464 # skip channelstream for draft comments
465 465 if not all_drafts:
466 466 comment_broadcast_channel = channelstream.comment_channel(
467 467 self.db_repo_name, commit_obj=commit)
468 468
469 469 comment_data = data
470 470 posted_comment_type = 'inline' if is_inline else 'general'
471 471 if len(data) == 1:
472 472 msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
473 473 else:
474 474 msg = _('posted {} new {} comments').format(len(data), posted_comment_type)
475 475
476 476 channelstream.comment_channelstream_push(
477 477 self.request, comment_broadcast_channel, self._rhodecode_user, msg,
478 478 comment_data=comment_data)
479 479
480 480 return data
481 481
482 482 @LoginRequired()
483 483 @NotAnonymous()
484 484 @HasRepoPermissionAnyDecorator(
485 485 'repository.read', 'repository.write', 'repository.admin')
486 486 @CSRFRequired()
487 487 def repo_commit_comment_create(self):
488 488 _ = self.request.translate
489 489 commit_id = self.request.matchdict['commit_id']
490 490
491 491 multi_commit_ids = []
492 492 for _commit_id in self.request.POST.get('commit_ids', '').split(','):
493 493 if _commit_id not in ['', None, EmptyCommit.raw_id]:
494 494 if _commit_id not in multi_commit_ids:
495 495 multi_commit_ids.append(_commit_id)
496 496
497 497 commit_ids = multi_commit_ids or [commit_id]
498 498
499 499 data = []
500 500 # Multiple comments for each passed commit id
501 501 for current_id in filter(None, commit_ids):
502 502 comment_data = {
503 503 'comment_type': self.request.POST.get('comment_type'),
504 504 'text': self.request.POST.get('text'),
505 505 'status': self.request.POST.get('changeset_status', None),
506 506 'is_draft': self.request.POST.get('draft'),
507 507 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
508 508 'close_pull_request': self.request.POST.get('close_pull_request'),
509 509 'f_path': self.request.POST.get('f_path'),
510 510 'line': self.request.POST.get('line'),
511 511 }
512 512 comment = self._commit_comments_create(commit_id=current_id, comments=[comment_data])
513 513 data.append(comment)
514 514
515 515 return data if len(data) > 1 else data[0]
516 516
517 517 @LoginRequired()
518 518 @NotAnonymous()
519 519 @HasRepoPermissionAnyDecorator(
520 520 'repository.read', 'repository.write', 'repository.admin')
521 521 @CSRFRequired()
522 522 def repo_commit_comment_preview(self):
523 523 # Technically a CSRF token is not needed as no state changes with this
524 524 # call. However, as this is a POST it is better to have it, so automated
525 525 # tools don't flag it as potential CSRF.
526 526 # Post is required because the payload could be bigger than the maximum
527 527 # allowed by GET.
528 528
529 529 text = self.request.POST.get('text')
530 530 renderer = self.request.POST.get('renderer') or 'rst'
531 531 if text:
532 532 return h.render(text, renderer=renderer, mentions=True,
533 533 repo_name=self.db_repo_name)
534 534 return ''
535 535
536 536 @LoginRequired()
537 537 @HasRepoPermissionAnyDecorator(
538 538 'repository.read', 'repository.write', 'repository.admin')
539 539 @CSRFRequired()
540 540 def repo_commit_comment_history_view(self):
541 541 c = self.load_default_context()
542 542
543 543 comment_history_id = self.request.matchdict['comment_history_id']
544 544 comment_history = ChangesetCommentHistory.get_or_404(comment_history_id)
545 545 is_repo_comment = comment_history.comment.repo.repo_id == self.db_repo.repo_id
546 546
547 547 if is_repo_comment:
548 548 c.comment_history = comment_history
549 549
550 550 rendered_comment = render(
551 551 'rhodecode:templates/changeset/comment_history.mako',
552 552 self._get_template_context(c)
553 553 , self.request)
554 554 return rendered_comment
555 555 else:
556 556 log.warning('No permissions for user %s to show comment_history_id: %s',
557 557 self._rhodecode_db_user, comment_history_id)
558 558 raise HTTPNotFound()
559 559
560 560 @LoginRequired()
561 561 @NotAnonymous()
562 562 @HasRepoPermissionAnyDecorator(
563 563 'repository.read', 'repository.write', 'repository.admin')
564 564 @CSRFRequired()
565 565 def repo_commit_comment_attachment_upload(self):
566 566 c = self.load_default_context()
567 567 upload_key = 'attachment'
568 568
569 569 file_obj = self.request.POST.get(upload_key)
570 570
571 571 if file_obj is None:
572 572 self.request.response.status = 400
573 573 return {'store_fid': None,
574 574 'access_path': None,
575 575 'error': '{} data field is missing'.format(upload_key)}
576 576
577 577 if not hasattr(file_obj, 'filename'):
578 578 self.request.response.status = 400
579 579 return {'store_fid': None,
580 580 'access_path': None,
581 581 'error': 'filename cannot be read from the data field'}
582 582
583 583 filename = file_obj.filename
584 584 file_display_name = filename
585 585
586 586 metadata = {
587 587 'user_uploaded': {'username': self._rhodecode_user.username,
588 588 'user_id': self._rhodecode_user.user_id,
589 589 'ip': self._rhodecode_user.ip_addr}}
590 590
591 591 # TODO(marcink): allow .ini configuration for allowed_extensions, and file-size
592 592 allowed_extensions = [
593 593 'gif', '.jpeg', '.jpg', '.png', '.docx', '.gz', '.log', '.pdf',
594 594 '.pptx', '.txt', '.xlsx', '.zip']
595 595 max_file_size = 10 * 1024 * 1024 # 10MB, also validated via dropzone.js
596 596
597 597 try:
598 598 storage = store_utils.get_file_storage(self.request.registry.settings)
599 599 store_uid, metadata = storage.save_file(
600 600 file_obj.file, filename, extra_metadata=metadata,
601 601 extensions=allowed_extensions, max_filesize=max_file_size)
602 602 except FileNotAllowedException:
603 603 self.request.response.status = 400
604 604 permitted_extensions = ', '.join(allowed_extensions)
605 605 error_msg = 'File `{}` is not allowed. ' \
606 606 'Only following extensions are permitted: {}'.format(
607 607 filename, permitted_extensions)
608 608 return {'store_fid': None,
609 609 'access_path': None,
610 610 'error': error_msg}
611 611 except FileOverSizeException:
612 612 self.request.response.status = 400
613 613 limit_mb = h.format_byte_size_binary(max_file_size)
614 614 return {'store_fid': None,
615 615 'access_path': None,
616 616 'error': 'File {} is exceeding allowed limit of {}.'.format(
617 617 filename, limit_mb)}
618 618
619 619 try:
620 620 entry = FileStore.create(
621 621 file_uid=store_uid, filename=metadata["filename"],
622 622 file_hash=metadata["sha256"], file_size=metadata["size"],
623 623 file_display_name=file_display_name,
624 624 file_description=u'comment attachment `{}`'.format(safe_unicode(filename)),
625 625 hidden=True, check_acl=True, user_id=self._rhodecode_user.user_id,
626 626 scope_repo_id=self.db_repo.repo_id
627 627 )
628 628 Session().add(entry)
629 629 Session().commit()
630 630 log.debug('Stored upload in DB as %s', entry)
631 631 except Exception:
632 632 log.exception('Failed to store file %s', filename)
633 633 self.request.response.status = 400
634 634 return {'store_fid': None,
635 635 'access_path': None,
636 636 'error': 'File {} failed to store in DB.'.format(filename)}
637 637
638 638 Session().commit()
639 639
640 640 return {
641 641 'store_fid': store_uid,
642 642 'access_path': h.route_path(
643 643 'download_file', fid=store_uid),
644 644 'fqn_access_path': h.route_url(
645 645 'download_file', fid=store_uid),
646 646 'repo_access_path': h.route_path(
647 647 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
648 648 'repo_fqn_access_path': h.route_url(
649 649 'repo_artifacts_get', repo_name=self.db_repo_name, uid=store_uid),
650 650 }
651 651
652 652 @LoginRequired()
653 653 @NotAnonymous()
654 654 @HasRepoPermissionAnyDecorator(
655 655 'repository.read', 'repository.write', 'repository.admin')
656 656 @CSRFRequired()
657 657 def repo_commit_comment_delete(self):
658 658 commit_id = self.request.matchdict['commit_id']
659 659 comment_id = self.request.matchdict['comment_id']
660 660
661 661 comment = ChangesetComment.get_or_404(comment_id)
662 662 if not comment:
663 663 log.debug('Comment with id:%s not found, skipping', comment_id)
664 664 # comment already deleted in another call probably
665 665 return True
666 666
667 667 if comment.immutable:
668 668 # don't allow deleting comments that are immutable
669 669 raise HTTPForbidden()
670 670
671 671 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
672 672 super_admin = h.HasPermissionAny('hg.admin')()
673 673 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
674 674 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
675 675 comment_repo_admin = is_repo_admin and is_repo_comment
676 676
677 if comment.draft and not comment_owner:
678 # We never allow to delete draft comments for other than owners
679 raise HTTPNotFound()
680
677 681 if super_admin or comment_owner or comment_repo_admin:
678 682 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
679 683 Session().commit()
680 684 return True
681 685 else:
682 686 log.warning('No permissions for user %s to delete comment_id: %s',
683 687 self._rhodecode_db_user, comment_id)
684 688 raise HTTPNotFound()
685 689
686 690 @LoginRequired()
687 691 @NotAnonymous()
688 692 @HasRepoPermissionAnyDecorator(
689 693 'repository.read', 'repository.write', 'repository.admin')
690 694 @CSRFRequired()
691 695 def repo_commit_comment_edit(self):
692 696 self.load_default_context()
693 697
694 698 commit_id = self.request.matchdict['commit_id']
695 699 comment_id = self.request.matchdict['comment_id']
696 700 comment = ChangesetComment.get_or_404(comment_id)
697 701
698 702 if comment.immutable:
699 703 # don't allow editing comments that are immutable
700 704 raise HTTPForbidden()
701 705
702 706 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
703 707 super_admin = h.HasPermissionAny('hg.admin')()
704 708 comment_owner = (comment.author.user_id == self._rhodecode_db_user.user_id)
705 709 is_repo_comment = comment.repo.repo_id == self.db_repo.repo_id
706 710 comment_repo_admin = is_repo_admin and is_repo_comment
707 711
708 712 if super_admin or comment_owner or comment_repo_admin:
709 713 text = self.request.POST.get('text')
710 714 version = self.request.POST.get('version')
711 715 if text == comment.text:
712 716 log.warning(
713 717 'Comment(repo): '
714 718 'Trying to create new version '
715 719 'with the same comment body {}'.format(
716 720 comment_id,
717 721 )
718 722 )
719 723 raise HTTPNotFound()
720 724
721 725 if version.isdigit():
722 726 version = int(version)
723 727 else:
724 728 log.warning(
725 729 'Comment(repo): Wrong version type {} {} '
726 730 'for comment {}'.format(
727 731 version,
728 732 type(version),
729 733 comment_id,
730 734 )
731 735 )
732 736 raise HTTPNotFound()
733 737
734 738 try:
735 739 comment_history = CommentsModel().edit(
736 740 comment_id=comment_id,
737 741 text=text,
738 742 auth_user=self._rhodecode_user,
739 743 version=version,
740 744 )
741 745 except CommentVersionMismatch:
742 746 raise HTTPConflict()
743 747
744 748 if not comment_history:
745 749 raise HTTPNotFound()
746 750
747 751 if not comment.draft:
748 752 commit = self.db_repo.get_commit(commit_id)
749 753 CommentsModel().trigger_commit_comment_hook(
750 754 self.db_repo, self._rhodecode_user, 'edit',
751 755 data={'comment': comment, 'commit': commit})
752 756
753 757 Session().commit()
754 758 return {
755 759 'comment_history_id': comment_history.comment_history_id,
756 760 'comment_id': comment.comment_id,
757 761 'comment_version': comment_history.version,
758 762 'comment_author_username': comment_history.author.username,
759 763 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
760 764 'comment_created_on': h.age_component(comment_history.created_on,
761 765 time_is_local=True),
762 766 }
763 767 else:
764 768 log.warning('No permissions for user %s to edit comment_id: %s',
765 769 self._rhodecode_db_user, comment_id)
766 770 raise HTTPNotFound()
767 771
768 772 @LoginRequired()
769 773 @HasRepoPermissionAnyDecorator(
770 774 'repository.read', 'repository.write', 'repository.admin')
771 775 def repo_commit_data(self):
772 776 commit_id = self.request.matchdict['commit_id']
773 777 self.load_default_context()
774 778
775 779 try:
776 780 return self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
777 781 except CommitDoesNotExistError as e:
778 782 return EmptyCommit(message=str(e))
779 783
780 784 @LoginRequired()
781 785 @HasRepoPermissionAnyDecorator(
782 786 'repository.read', 'repository.write', 'repository.admin')
783 787 def repo_commit_children(self):
784 788 commit_id = self.request.matchdict['commit_id']
785 789 self.load_default_context()
786 790
787 791 try:
788 792 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
789 793 children = commit.children
790 794 except CommitDoesNotExistError:
791 795 children = []
792 796
793 797 result = {"results": children}
794 798 return result
795 799
796 800 @LoginRequired()
797 801 @HasRepoPermissionAnyDecorator(
798 802 'repository.read', 'repository.write', 'repository.admin')
799 803 def repo_commit_parents(self):
800 804 commit_id = self.request.matchdict['commit_id']
801 805 self.load_default_context()
802 806
803 807 try:
804 808 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
805 809 parents = commit.parents
806 810 except CommitDoesNotExistError:
807 811 parents = []
808 812 result = {"results": parents}
809 813 return result
@@ -1,206 +1,210 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2017-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import pytz
21 21 import logging
22 22
23 23 from pyramid.response import Response
24 24
25 25 from rhodecode.apps._base import RepoAppView
26 26 from rhodecode.lib.feedgenerator import Rss201rev2Feed, Atom1Feed
27 27 from rhodecode.lib import audit_logger
28 28 from rhodecode.lib import rc_cache
29 29 from rhodecode.lib import helpers as h
30 30 from rhodecode.lib.auth import (
31 31 LoginRequired, HasRepoPermissionAnyDecorator)
32 32 from rhodecode.lib.diffs import DiffProcessor, LimitedDiffContainer
33 33 from rhodecode.lib.utils2 import str2bool, safe_int, md5_safe
34 34 from rhodecode.model.db import UserApiKeys, CacheKey
35 35
36 36 log = logging.getLogger(__name__)
37 37
38 38
39 39 class RepoFeedView(RepoAppView):
40 40 def load_default_context(self):
41 41 c = self._get_local_tmpl_context()
42 42 self._load_defaults()
43 43 return c
44 44
45 45 def _get_config(self):
46 46 import rhodecode
47 47 config = rhodecode.CONFIG
48 48
49 49 return {
50 50 'language': 'en-us',
51 51 'feed_ttl': '5', # TTL of feed,
52 52 'feed_include_diff':
53 53 str2bool(config.get('rss_include_diff', False)),
54 54 'feed_items_per_page':
55 55 safe_int(config.get('rss_items_per_page', 20)),
56 56 'feed_diff_limit':
57 57 # we need to protect from parsing huge diffs here, otherwise
58 58 # we can kill the server
59 59 safe_int(config.get('rss_cut_off_limit', 32 * 1024)),
60 60 }
61 61
62 62 def _load_defaults(self):
63 63 _ = self.request.translate
64 64 config = self._get_config()
65 65 # common values for feeds
66 66 self.description = _('Changes on %s repository')
67 67 self.title = _('%s %s feed') % (self.db_repo_name, '%s')
68 68 self.language = config["language"]
69 69 self.ttl = config["feed_ttl"]
70 70 self.feed_include_diff = config['feed_include_diff']
71 71 self.feed_diff_limit = config['feed_diff_limit']
72 72 self.feed_items_per_page = config['feed_items_per_page']
73 73
74 74 def _changes(self, commit):
75 75 diff_processor = DiffProcessor(
76 76 commit.diff(), diff_limit=self.feed_diff_limit)
77 77 _parsed = diff_processor.prepare(inline_diff=False)
78 78 limited_diff = isinstance(_parsed, LimitedDiffContainer)
79 79
80 80 return diff_processor, _parsed, limited_diff
81 81
82 82 def _get_title(self, commit):
83 83 return h.chop_at_smart(commit.message, '\n', suffix_if_chopped='...')
84 84
85 85 def _get_description(self, commit):
86 86 _renderer = self.request.get_partial_renderer(
87 87 'rhodecode:templates/feed/atom_feed_entry.mako')
88 88 diff_processor, parsed_diff, limited_diff = self._changes(commit)
89 89 filtered_parsed_diff, has_hidden_changes = self.path_filter.filter_patchset(parsed_diff)
90 90 return _renderer(
91 91 'body',
92 92 commit=commit,
93 93 parsed_diff=filtered_parsed_diff,
94 94 limited_diff=limited_diff,
95 95 feed_include_diff=self.feed_include_diff,
96 96 diff_processor=diff_processor,
97 97 has_hidden_changes=has_hidden_changes
98 98 )
99 99
100 100 def _set_timezone(self, date, tzinfo=pytz.utc):
101 101 if not getattr(date, "tzinfo", None):
102 102 date.replace(tzinfo=tzinfo)
103 103 return date
104 104
105 105 def _get_commits(self):
106 106 pre_load = ['author', 'branch', 'date', 'message', 'parents']
107 if self.rhodecode_vcs_repo.is_empty():
108 return []
109
107 110 collection = self.rhodecode_vcs_repo.get_commits(
108 111 branch_name=None, show_hidden=False, pre_load=pre_load,
109 112 translate_tags=False)
110 113
111 114 return list(collection[-self.feed_items_per_page:])
112 115
113 116 def uid(self, repo_id, commit_id):
114 117 return '{}:{}'.format(md5_safe(repo_id), md5_safe(commit_id))
115 118
116 119 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
117 120 @HasRepoPermissionAnyDecorator(
118 121 'repository.read', 'repository.write', 'repository.admin')
119 122 def atom(self):
120 123 """
121 124 Produce an atom-1.0 feed via feedgenerator module
122 125 """
123 126 self.load_default_context()
124 127 force_recache = self.get_recache_flag()
125 128
126 129 cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id)
127 130 condition = not (self.path_filter.is_enabled or force_recache)
128 131 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
129 132
130 133 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
131 134 condition=condition)
132 135 def generate_atom_feed(repo_id, _repo_name, _commit_id, _feed_type):
133 136 feed = Atom1Feed(
134 137 title=self.title % 'atom',
135 138 link=h.route_url('repo_summary', repo_name=_repo_name),
136 139 description=self.description % _repo_name,
137 140 language=self.language,
138 141 ttl=self.ttl
139 142 )
143
140 144 for commit in reversed(self._get_commits()):
141 145 date = self._set_timezone(commit.date)
142 146 feed.add_item(
143 147 unique_id=self.uid(repo_id, commit.raw_id),
144 148 title=self._get_title(commit),
145 149 author_name=commit.author,
146 150 description=self._get_description(commit),
147 151 link=h.route_url(
148 152 'repo_commit', repo_name=_repo_name,
149 153 commit_id=commit.raw_id),
150 154 pubdate=date,)
151 155
152 156 return feed.content_type, feed.writeString('utf-8')
153 157
154 158 commit_id = self.db_repo.changeset_cache.get('raw_id')
155 159 content_type, feed = generate_atom_feed(
156 160 self.db_repo.repo_id, self.db_repo.repo_name, commit_id, 'atom')
157 161
158 162 response = Response(feed)
159 163 response.content_type = content_type
160 164 return response
161 165
162 166 @LoginRequired(auth_token_access=[UserApiKeys.ROLE_FEED])
163 167 @HasRepoPermissionAnyDecorator(
164 168 'repository.read', 'repository.write', 'repository.admin')
165 169 def rss(self):
166 170 """
167 171 Produce an rss2 feed via feedgenerator module
168 172 """
169 173 self.load_default_context()
170 174 force_recache = self.get_recache_flag()
171 175
172 176 cache_namespace_uid = 'cache_repo_feed.{}'.format(self.db_repo.repo_id)
173 177 condition = not (self.path_filter.is_enabled or force_recache)
174 178 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
175 179
176 180 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
177 181 condition=condition)
178 182 def generate_rss_feed(repo_id, _repo_name, _commit_id, _feed_type):
179 183 feed = Rss201rev2Feed(
180 184 title=self.title % 'rss',
181 185 link=h.route_url('repo_summary', repo_name=_repo_name),
182 186 description=self.description % _repo_name,
183 187 language=self.language,
184 188 ttl=self.ttl
185 189 )
186 190
187 191 for commit in reversed(self._get_commits()):
188 192 date = self._set_timezone(commit.date)
189 193 feed.add_item(
190 194 unique_id=self.uid(repo_id, commit.raw_id),
191 195 title=self._get_title(commit),
192 196 author_name=commit.author,
193 197 description=self._get_description(commit),
194 198 link=h.route_url(
195 199 'repo_commit', repo_name=_repo_name,
196 200 commit_id=commit.raw_id),
197 201 pubdate=date,)
198 202 return feed.content_type, feed.writeString('utf-8')
199 203
200 204 commit_id = self.db_repo.changeset_cache.get('raw_id')
201 205 content_type, feed = generate_rss_feed(
202 206 self.db_repo.repo_id, self.db_repo.repo_name, commit_id, 'rss')
203 207
204 208 response = Response(feed)
205 209 response.content_type = content_type
206 210 return response
@@ -1,1574 +1,1581 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import itertools
22 22 import logging
23 23 import os
24 24 import shutil
25 25 import tempfile
26 26 import collections
27 27 import urllib
28 28 import pathlib2
29 29
30 30 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
31 31
32 32 from pyramid.renderers import render
33 33 from pyramid.response import Response
34 34
35 35 import rhodecode
36 36 from rhodecode.apps._base import RepoAppView
37 37
38 38
39 39 from rhodecode.lib import diffs, helpers as h, rc_cache
40 40 from rhodecode.lib import audit_logger
41 41 from rhodecode.lib.view_utils import parse_path_ref
42 42 from rhodecode.lib.exceptions import NonRelativePathError
43 43 from rhodecode.lib.codeblocks import (
44 44 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
45 45 from rhodecode.lib.utils2 import (
46 46 convert_line_endings, detect_mode, safe_str, str2bool, safe_int, sha1, safe_unicode)
47 47 from rhodecode.lib.auth import (
48 48 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
49 49 from rhodecode.lib.vcs import path as vcspath
50 50 from rhodecode.lib.vcs.backends.base import EmptyCommit
51 51 from rhodecode.lib.vcs.conf import settings
52 52 from rhodecode.lib.vcs.nodes import FileNode
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
55 55 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
56 56 NodeDoesNotExistError, CommitError, NodeError)
57 57
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.db import Repository
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
64 64 class RepoFilesView(RepoAppView):
65 65
66 66 @staticmethod
67 67 def adjust_file_path_for_svn(f_path, repo):
68 68 """
69 69 Computes the relative path of `f_path`.
70 70
71 71 This is mainly based on prefix matching of the recognized tags and
72 72 branches in the underlying repository.
73 73 """
74 74 tags_and_branches = itertools.chain(
75 75 repo.branches.iterkeys(),
76 76 repo.tags.iterkeys())
77 77 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
78 78
79 79 for name in tags_and_branches:
80 80 if f_path.startswith('{}/'.format(name)):
81 81 f_path = vcspath.relpath(f_path, name)
82 82 break
83 83 return f_path
84 84
85 85 def load_default_context(self):
86 86 c = self._get_local_tmpl_context(include_app_defaults=True)
87 87 c.rhodecode_repo = self.rhodecode_vcs_repo
88 88 c.enable_downloads = self.db_repo.enable_downloads
89 89 return c
90 90
91 91 def _ensure_not_locked(self, commit_id='tip'):
92 92 _ = self.request.translate
93 93
94 94 repo = self.db_repo
95 95 if repo.enable_locking and repo.locked[0]:
96 96 h.flash(_('This repository has been locked by %s on %s')
97 97 % (h.person_by_id(repo.locked[0]),
98 98 h.format_date(h.time_to_datetime(repo.locked[1]))),
99 99 'warning')
100 100 files_url = h.route_path(
101 101 'repo_files:default_path',
102 102 repo_name=self.db_repo_name, commit_id=commit_id)
103 103 raise HTTPFound(files_url)
104 104
105 105 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
106 106 _ = self.request.translate
107 107
108 108 if not is_head:
109 109 message = _('Cannot modify file. '
110 110 'Given commit `{}` is not head of a branch.').format(commit_id)
111 111 h.flash(message, category='warning')
112 112
113 113 if json_mode:
114 114 return message
115 115
116 116 files_url = h.route_path(
117 117 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
118 118 f_path=f_path)
119 119 raise HTTPFound(files_url)
120 120
121 121 def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
122 122 _ = self.request.translate
123 123
124 124 rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
125 125 self.db_repo_name, branch_name)
126 126 if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
127 127 message = _('Branch `{}` changes forbidden by rule {}.').format(
128 128 h.escape(branch_name), h.escape(rule))
129 129 h.flash(message, 'warning')
130 130
131 131 if json_mode:
132 132 return message
133 133
134 134 files_url = h.route_path(
135 135 'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)
136 136
137 137 raise HTTPFound(files_url)
138 138
139 139 def _get_commit_and_path(self):
140 140 default_commit_id = self.db_repo.landing_ref_name
141 141 default_f_path = '/'
142 142
143 143 commit_id = self.request.matchdict.get(
144 144 'commit_id', default_commit_id)
145 145 f_path = self._get_f_path(self.request.matchdict, default_f_path)
146 146 return commit_id, f_path
147 147
148 148 def _get_default_encoding(self, c):
149 149 enc_list = getattr(c, 'default_encodings', [])
150 150 return enc_list[0] if enc_list else 'UTF-8'
151 151
    def _get_commit_or_redirect(self, commit_id, redirect_after=True):
        """
        This is a safe way to get commit. If an error occurs it redirects to
        tip with proper message

        :param commit_id: id of commit to fetch
        :param redirect_after: toggle redirection
        """
        _ = self.request.translate

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id)
        except EmptyRepositoryError:
            if not redirect_after:
                return None

            # repo has no commits: users with write access get a link to the
            # "add new file" view, others only see the warning
            _url = h.route_path(
                'repo_files_add_file',
                repo_name=self.db_repo_name, commit_id=0, f_path='')

            if h.HasRepoPermissionAny(
                    'repository.write', 'repository.admin')(self.db_repo_name):
                add_new = h.link_to(
                    _('Click here to add a new file.'), _url, class_="alert-link")
            else:
                add_new = ""

            h.flash(h.literal(
                _('There are no files yet. %s') % add_new), category='warning')
            raise HTTPFound(
                h.route_path('repo_summary', repo_name=self.db_repo_name))

        except (CommitDoesNotExistError, LookupError) as e:
            # unknown commit id: 404 with a flash message
            msg = _('No such commit exists for this repository. Commit: {}').format(commit_id)
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            # other backend failures: surface the (escaped) error text, 404
            h.flash(safe_str(h.escape(e)), category='error')
            raise HTTPNotFound()
191 191
192 192 def _get_filenode_or_redirect(self, commit_obj, path):
193 193 """
194 194 Returns file_node, if error occurs or given path is directory,
195 195 it'll redirect to top level path
196 196 """
197 197 _ = self.request.translate
198 198
199 199 try:
200 200 file_node = commit_obj.get_node(path)
201 201 if file_node.is_dir():
202 202 raise RepositoryError('The given path is a directory')
203 203 except CommitDoesNotExistError:
204 204 log.exception('No such commit exists for this repository')
205 205 h.flash(_('No such commit exists for this repository'), category='error')
206 206 raise HTTPNotFound()
207 207 except RepositoryError as e:
208 208 log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
209 209 h.flash(safe_str(h.escape(e)), category='error')
210 210 raise HTTPNotFound()
211 211
212 212 return file_node
213 213
    def _is_valid_head(self, commit_id, repo, landing_ref):
        """
        Resolve `commit_id` against the heads of `repo`.

        :returns: tuple (branch_name, sha_commit_id, is_head); `is_head` is
            True when `commit_id` is a branch name, a branch-head sha, or —
            for svn — the latest revision.
        """
        branch_name = sha_commit_id = ''
        is_head = False
        log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)

        for _branch_name, branch_commit_id in repo.branches.items():
            # simple case we pass in branch name, it's a HEAD
            if commit_id == _branch_name:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break
            # case when we pass in full sha commit_id, which is a head
            elif commit_id == branch_commit_id:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break

        if h.is_svn(repo) and not repo.is_empty():
            # Note: Subversion only has one head.
            if commit_id == repo.get_commit(commit_idx=-1).raw_id:
                is_head = True
            # early return: branch bookkeeping below doesn't apply to svn
            return branch_name, sha_commit_id, is_head

        # checked branches, means we only need to try to get the branch/commit_sha
        if repo.is_empty():
            # empty repo: treat the landing ref as the head of an unborn branch
            is_head = True
            branch_name = landing_ref
            sha_commit_id = EmptyCommit().raw_id
        else:
            commit = repo.get_commit(commit_id=commit_id)
            if commit:
                branch_name = commit.branch
                sha_commit_id = commit.raw_id

        return branch_name, sha_commit_id, is_head
251 251
252 252 def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False, at_rev=None):
253 253
254 254 repo_id = self.db_repo.repo_id
255 255 force_recache = self.get_recache_flag()
256 256
257 257 cache_seconds = safe_int(
258 258 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
259 259 cache_on = not force_recache and cache_seconds > 0
260 260 log.debug(
261 261 'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
262 262 'with caching: %s[TTL: %ss]' % (
263 263 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
264 264
265 265 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
266 266 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
267 267
268 268 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
269 269 def compute_file_tree(ver, _name_hash, _repo_id, _commit_id, _f_path, _full_load, _at_rev):
270 270 log.debug('Generating cached file tree at ver:%s for repo_id: %s, %s, %s',
271 271 ver, _repo_id, _commit_id, _f_path)
272 272
273 273 c.full_load = _full_load
274 274 return render(
275 275 'rhodecode:templates/files/files_browser_tree.mako',
276 276 self._get_template_context(c), self.request, _at_rev)
277 277
278 278 return compute_file_tree(
279 279 rc_cache.FILE_TREE_CACHE_VER, self.db_repo.repo_name_hash,
280 280 self.db_repo.repo_id, commit_id, f_path, full_load, at_rev)
281 281
282 282 def _get_archive_spec(self, fname):
283 283 log.debug('Detecting archive spec for: `%s`', fname)
284 284
285 285 fileformat = None
286 286 ext = None
287 287 content_type = None
288 288 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
289 289
290 290 if fname.endswith(extension):
291 291 fileformat = a_type
292 292 log.debug('archive is of type: %s', fileformat)
293 293 ext = extension
294 294 break
295 295
296 296 if not fileformat:
297 297 raise ValueError()
298 298
299 299 # left over part of whole fname is the commit
300 300 commit_id = fname[:-len(ext)]
301 301
302 302 return commit_id, ext, fileformat, content_type
303 303
304 304 def create_pure_path(self, *parts):
305 305 # Split paths and sanitize them, removing any ../ etc
306 306 sanitized_path = [
307 307 x for x in pathlib2.PurePath(*parts).parts
308 308 if x not in ['.', '..']]
309 309
310 310 pure_path = pathlib2.PurePath(*sanitized_path)
311 311 return pure_path
312 312
313 313 def _is_lf_enabled(self, target_repo):
314 314 lf_enabled = False
315 315
316 316 lf_key_for_vcs_map = {
317 317 'hg': 'extensions_largefiles',
318 318 'git': 'vcs_git_lfs_enabled'
319 319 }
320 320
321 321 lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)
322 322
323 323 if lf_key_for_vcs:
324 324 lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)
325 325
326 326 return lf_enabled
327 327
328 def _get_archive_name(self, db_repo_name, commit_sha, ext, subrepos=False, path_sha=''):
328 def _get_archive_name(self, db_repo_name, commit_sha, ext, subrepos=False, path_sha='', with_hash=True):
329 329 # original backward compat name of archive
330 330 clean_name = safe_str(db_repo_name.replace('/', '_'))
331 331
332 332 # e.g vcsserver.zip
333 333 # e.g vcsserver-abcdefgh.zip
334 334 # e.g vcsserver-abcdefgh-defghijk.zip
335 archive_name = '{}{}{}{}{}'.format(
335 archive_name = '{}{}{}{}{}{}'.format(
336 336 clean_name,
337 337 '-sub' if subrepos else '',
338 338 commit_sha,
339 '-{}'.format('plain') if not with_hash else '',
339 340 '-{}'.format(path_sha) if path_sha else '',
340 341 ext)
341 342 return archive_name
342 343
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_archivefile(self):
        """
        Stream an archive (zip/tar/...) of the repository at a commit.

        The `fname` route match encodes `<commit><ext>`; GET params
        `subrepos`, `with_hash`, `at_path` and `no_cache` tune the result.
        Generated archives are cached on disk when `archive_cache_dir` is
        configured.
        """
        # archive cache config
        from rhodecode import CONFIG
        _ = self.request.translate
        self.load_default_context()
        default_at_path = '/'
        fname = self.request.matchdict['fname']
        subrepos = self.request.GET.get('subrepos') == 'true'
        with_hash = str2bool(self.request.GET.get('with_hash', '1'))
        at_path = self.request.GET.get('at_path') or default_at_path

        if not self.db_repo.enable_downloads:
            return Response(_('Downloads disabled'))

        try:
            commit_id, ext, fileformat, content_type = \
                self._get_archive_spec(fname)
        except ValueError:
            return Response(_('Unknown archive type for: `{}`').format(
                h.escape(fname)))

        try:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id)
        except CommitDoesNotExistError:
            return Response(_('Unknown commit_id {}').format(
                h.escape(commit_id)))
        except EmptyRepositoryError:
            return Response(_('Empty repository'))

        # a ref or shortened sha was used: redirect the client to the
        # canonical full-sha url so cache entries are keyed consistently
        if commit_id != commit.raw_id:
            fname='{}{}'.format(commit.raw_id, ext)
            raise HTTPFound(self.request.current_route_path(fname=fname))

        try:
            at_path = commit.get_node(at_path).path or default_at_path
        except Exception:
            return Response(_('No node at path {} for this repository').format(at_path))

        # path sha becomes part of the name only for non-root sub archives
        path_sha = ''
        if at_path != default_at_path:
            path_sha = sha1(at_path)[:8]
        short_sha = '-{}'.format(safe_str(commit.short_id))
        # name used for the on-disk cache entry
        archive_name = self._get_archive_name(
            self.db_repo_name, commit_sha=short_sha, ext=ext, subrepos=subrepos,
            path_sha=path_sha, with_hash=with_hash)

        if not with_hash:
            short_sha = ''
            path_sha = ''

        # what end client gets served
        response_archive_name = self._get_archive_name(
            self.db_repo_name, commit_sha=short_sha, ext=ext, subrepos=subrepos,
            path_sha=path_sha, with_hash=with_hash)
        # remove extension from our archive directory name
        archive_dir_name = response_archive_name[:-len(ext)]

        use_cached_archive = False
        archive_cache_dir = CONFIG.get('archive_cache_dir')
        archive_cache_enabled = archive_cache_dir and not self.request.GET.get('no_cache')
        cached_archive_path = None

        if archive_cache_enabled:
            # make sure the archive cache dir exists so it's ok to write
            if not os.path.isdir(CONFIG['archive_cache_dir']):
                os.makedirs(CONFIG['archive_cache_dir'])

            cached_archive_path = os.path.join(
                CONFIG['archive_cache_dir'], archive_name)
            if os.path.isfile(cached_archive_path):
                log.debug('Found cached archive in %s', cached_archive_path)
                fd, archive = None, cached_archive_path
                use_cached_archive = True
            else:
                log.debug('Archive %s is not yet cached', archive_name)

        # generate new archive, as previous was not found in the cache
        if not use_cached_archive:
            _dir = os.path.abspath(archive_cache_dir) if archive_cache_dir else None
            fd, archive = tempfile.mkstemp(dir=_dir)
            log.debug('Creating new temp archive in %s', archive)
            try:
                commit.archive_repo(archive, archive_dir_name=archive_dir_name,
                                    kind=fileformat, subrepos=subrepos,
                                    archive_at_path=at_path)
            except ImproperArchiveTypeError:
                # NOTE(review): returns a bare string, not a Response like
                # the other error paths here — confirm this is intended
                return _('Unknown archive type')
            if archive_cache_enabled:
                # if we generated the archive and we have cache enabled
                # let's use this for future
                log.debug('Storing new archive in %s', cached_archive_path)
                shutil.move(archive, cached_archive_path)
                archive = cached_archive_path

        # store download action in the audit log
        audit_logger.store_web(
            'repo.archive.download', action_data={
                'user_agent': self.request.user_agent,
                'archive_name': archive_name,
                'archive_spec': fname,
                'archive_cached': use_cached_archive},
            user=self._rhodecode_user,
            repo=self.db_repo,
            commit=True
        )

        def get_chunked_archive(archive_path):
            # stream in 16kb chunks; temp (non-cached) archives are closed
            # and removed once fully served
            with open(archive_path, 'rb') as stream:
                while True:
                    data = stream.read(16 * 1024)
                    if not data:
                        if fd:  # fd means we used temporary file
                            os.close(fd)
                        if not archive_cache_enabled:
                            log.debug('Destroying temp archive %s', archive_path)
                            os.remove(archive_path)
                        break
                    yield data

        response = Response(app_iter=get_chunked_archive(archive))
        response.content_disposition = str('attachment; filename=%s' % response_archive_name)
        response.content_type = str(content_type)

        return response
467 474
468 475 def _get_file_node(self, commit_id, f_path):
469 476 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
470 477 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
471 478 try:
472 479 node = commit.get_node(f_path)
473 480 if node.is_dir():
474 481 raise NodeError('%s path is a %s not a file'
475 482 % (node, type(node)))
476 483 except NodeDoesNotExistError:
477 484 commit = EmptyCommit(
478 485 commit_id=commit_id,
479 486 idx=commit.idx,
480 487 repo=commit.repository,
481 488 alias=commit.repository.alias,
482 489 message=commit.message,
483 490 author=commit.author,
484 491 date=commit.date)
485 492 node = FileNode(f_path, '', commit=commit)
486 493 else:
487 494 commit = EmptyCommit(
488 495 repo=self.rhodecode_vcs_repo,
489 496 alias=self.rhodecode_vcs_repo.alias)
490 497 node = FileNode(f_path, '', commit=commit)
491 498 return node
492 499
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files_diff(self):
        """
        Diff two revisions of a file given via GET params `diff1`/`diff2`.

        Only `diff=download` and `diff=raw` are served here; any other
        action redirects to the full compare view.
        """
        c = self.load_default_context()
        f_path = self._get_f_path(self.request.matchdict)
        diff1 = self.request.GET.get('diff1', '')
        diff2 = self.request.GET.get('diff2', '')

        # diff1 may carry an embedded path spec, e.g. `<rev>:<path>`
        path1, diff1 = parse_path_ref(diff1, default_path=f_path)

        ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
        line_context = self.request.GET.get('context', 3)

        if not any((diff1, diff2)):
            h.flash(
                'Need query parameter "diff1" or "diff2" to generate a diff.',
                category='error')
            raise HTTPBadRequest()

        c.action = self.request.GET.get('diff')
        if c.action not in ['download', 'raw']:
            compare_url = h.route_path(
                'repo_compare',
                repo_name=self.db_repo_name,
                source_ref_type='rev',
                source_ref=diff1,
                target_repo=self.db_repo_name,
                target_ref_type='rev',
                target_ref=diff2,
                _query=dict(f_path=f_path))
            # redirect to new view if we render diff
            raise HTTPFound(compare_url)

        try:
            node1 = self._get_file_node(diff1, path1)
            node2 = self._get_file_node(diff2, f_path)
        except (RepositoryError, NodeError):
            log.exception("Exception while trying to get node from repository")
            raise HTTPFound(
                h.route_path('repo_files', repo_name=self.db_repo_name,
                             commit_id='tip', f_path=f_path))

        # both sides resolved to synthetic empty commits -> nothing to diff
        if all(isinstance(node.commit, EmptyCommit)
               for node in (node1, node2)):
            raise HTTPNotFound()

        c.commit_1 = node1.commit
        c.commit_2 = node2.commit

        if c.action == 'download':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            diff = diffs.DiffProcessor(_diff, format='gitdiff')

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = 'text/plain'
            response.content_disposition = (
                'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
            )
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        elif c.action == 'raw':
            # same patch as 'download' but rendered inline
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            diff = diffs.DiffProcessor(_diff, format='gitdiff')

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = 'text/plain'
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        # in case we ever end up here
        raise HTTPNotFound()
574 581
575 582 @LoginRequired()
576 583 @HasRepoPermissionAnyDecorator(
577 584 'repository.read', 'repository.write', 'repository.admin')
578 585 def repo_files_diff_2way_redirect(self):
579 586 """
580 587 Kept only to make OLD links work
581 588 """
582 589 f_path = self._get_f_path_unchecked(self.request.matchdict)
583 590 diff1 = self.request.GET.get('diff1', '')
584 591 diff2 = self.request.GET.get('diff2', '')
585 592
586 593 if not any((diff1, diff2)):
587 594 h.flash(
588 595 'Need query parameter "diff1" or "diff2" to generate a diff.',
589 596 category='error')
590 597 raise HTTPBadRequest()
591 598
592 599 compare_url = h.route_path(
593 600 'repo_compare',
594 601 repo_name=self.db_repo_name,
595 602 source_ref_type='rev',
596 603 source_ref=diff1,
597 604 target_ref_type='rev',
598 605 target_ref=diff2,
599 606 _query=dict(f_path=f_path, diffmode='sideside',
600 607 target_repo=self.db_repo_name,))
601 608 raise HTTPFound(compare_url)
602 609
603 610 @LoginRequired()
604 611 def repo_files_default_commit_redirect(self):
605 612 """
606 613 Special page that redirects to the landing page of files based on the default
607 614 commit for repository
608 615 """
609 616 c = self.load_default_context()
610 617 ref_name = c.rhodecode_db_repo.landing_ref_name
611 618 landing_url = h.repo_files_by_ref_url(
612 619 c.rhodecode_db_repo.repo_name,
613 620 c.rhodecode_db_repo.repo_type,
614 621 f_path='',
615 622 ref_name=ref_name,
616 623 commit_id='tip',
617 624 query=dict(at=ref_name)
618 625 )
619 626
620 627 raise HTTPFound(landing_url)
621 628
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_files(self):
        """
        Main files view: renders a single file (source / annotate /
        rendered) or a directory tree for the commit and path in the route.
        """
        c = self.load_default_context()

        view_name = getattr(self.request.matched_route, 'name', None)

        c.annotate = view_name == 'repo_files:annotated'
        # default is false, but .rst/.md files later are auto rendered, we can
        # overwrite auto rendering by setting this GET flag
        c.renderer = view_name == 'repo_files:rendered' or \
                     not self.request.GET.get('no-render', False)

        commit_id, f_path = self._get_commit_and_path()

        c.commit = self._get_commit_or_redirect(commit_id)
        c.branch = self.request.GET.get('branch', None)
        c.f_path = f_path
        at_rev = self.request.GET.get('at')

        # prev link
        try:
            prev_commit = c.commit.prev(c.branch)
            c.prev_commit = prev_commit
            c.url_prev = h.route_path(
                'repo_files', repo_name=self.db_repo_name,
                commit_id=prev_commit.raw_id, f_path=f_path)
            if c.branch:
                c.url_prev += '?branch=%s' % c.branch
        except (CommitDoesNotExistError, VCSError):
            # no previous commit on this branch: dead link
            c.url_prev = '#'
            c.prev_commit = EmptyCommit()

        # next link
        try:
            next_commit = c.commit.next(c.branch)
            c.next_commit = next_commit
            c.url_next = h.route_path(
                'repo_files', repo_name=self.db_repo_name,
                commit_id=next_commit.raw_id, f_path=f_path)
            if c.branch:
                c.url_next += '?branch=%s' % c.branch
        except (CommitDoesNotExistError, VCSError):
            c.url_next = '#'
            c.next_commit = EmptyCommit()

        # files or dirs
        try:
            c.file = c.commit.get_node(f_path)
            c.file_author = True
            c.file_tree = ''

            # load file content
            if c.file.is_file():
                c.lf_node = {}

                has_lf_enabled = self._is_lf_enabled(self.db_repo)
                if has_lf_enabled:
                    c.lf_node = c.file.get_largefile_node()

                c.file_source_page = 'true'
                c.file_last_commit = c.file.last_commit

                # oversized / binary files skip tokenizing below
                c.file_size_too_big = c.file.size > c.visual.cut_off_limit_file

                if not (c.file_size_too_big or c.file.is_binary):
                    if c.annotate:  # annotation has precedence over renderer
                        c.annotated_lines = filenode_as_annotated_lines_tokens(
                            c.file
                        )
                    else:
                        c.renderer = (
                            c.renderer and h.renderer_from_filename(c.file.path)
                        )
                        if not c.renderer:
                            c.lines = filenode_as_lines_tokens(c.file)

                _branch_name, _sha_commit_id, is_head = \
                    self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                        landing_ref=self.db_repo.landing_ref_name)
                c.on_branch_head = is_head

                # branch names containing '/' are ambiguous in urls
                branch = c.commit.branch if (
                    c.commit.branch and '/' not in c.commit.branch) else None
                c.branch_or_raw_id = branch or c.commit.raw_id
                c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)

                author = c.file_last_commit.author
                c.authors = [[
                    h.email(author),
                    h.person(author, 'username_or_name_or_email'),
                    1
                ]]

            else:  # load tree content at path
                c.file_source_page = 'false'
                c.authors = []
                # this loads a simple tree without metadata to speed things up
                # later via ajax we call repo_nodetree_full and fetch whole
                c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path, at_rev=at_rev)

            c.readme_data, c.readme_file = \
                self._get_readme_data(self.db_repo, c.visual.default_renderer,
                                      c.commit.raw_id, f_path)

        except RepositoryError as e:
            h.flash(safe_str(h.escape(e)), category='error')
            raise HTTPNotFound()

        if self.request.environ.get('HTTP_X_PJAX'):
            # partial render for pjax navigation
            html = render('rhodecode:templates/files/files_pjax.mako',
                          self._get_template_context(c), self.request)
        else:
            html = render('rhodecode:templates/files/files.mako',
                          self._get_template_context(c), self.request)
        return Response(html)
739 746
740 747 @HasRepoPermissionAnyDecorator(
741 748 'repository.read', 'repository.write', 'repository.admin')
742 749 def repo_files_annotated_previous(self):
743 750 self.load_default_context()
744 751
745 752 commit_id, f_path = self._get_commit_and_path()
746 753 commit = self._get_commit_or_redirect(commit_id)
747 754 prev_commit_id = commit.raw_id
748 755 line_anchor = self.request.GET.get('line_anchor')
749 756 is_file = False
750 757 try:
751 758 _file = commit.get_node(f_path)
752 759 is_file = _file.is_file()
753 760 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
754 761 pass
755 762
756 763 if is_file:
757 764 history = commit.get_path_history(f_path)
758 765 prev_commit_id = history[1].raw_id \
759 766 if len(history) > 1 else prev_commit_id
760 767 prev_url = h.route_path(
761 768 'repo_files:annotated', repo_name=self.db_repo_name,
762 769 commit_id=prev_commit_id, f_path=f_path,
763 770 _anchor='L{}'.format(line_anchor))
764 771
765 772 raise HTTPFound(prev_url)
766 773
767 774 @LoginRequired()
768 775 @HasRepoPermissionAnyDecorator(
769 776 'repository.read', 'repository.write', 'repository.admin')
770 777 def repo_nodetree_full(self):
771 778 """
772 779 Returns rendered html of file tree that contains commit date,
773 780 author, commit_id for the specified combination of
774 781 repo, commit_id and file path
775 782 """
776 783 c = self.load_default_context()
777 784
778 785 commit_id, f_path = self._get_commit_and_path()
779 786 commit = self._get_commit_or_redirect(commit_id)
780 787 try:
781 788 dir_node = commit.get_node(f_path)
782 789 except RepositoryError as e:
783 790 return Response('error: {}'.format(h.escape(safe_str(e))))
784 791
785 792 if dir_node.is_file():
786 793 return Response('')
787 794
788 795 c.file = dir_node
789 796 c.commit = commit
790 797 at_rev = self.request.GET.get('at')
791 798
792 799 html = self._get_tree_at_commit(
793 800 c, commit.raw_id, dir_node.path, full_load=True, at_rev=at_rev)
794 801
795 802 return Response(html)
796 803
797 804 def _get_attachement_headers(self, f_path):
798 805 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
799 806 safe_path = f_name.replace('"', '\\"')
800 807 encoded_path = urllib.quote(f_name)
801 808
802 809 return "attachment; " \
803 810 "filename=\"{}\"; " \
804 811 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
805 812
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_raw(self):
        """
        Action for show as raw, some mimetypes are "rendered",
        those include images, icons.
        """
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        raw_mimetype_mapping = {
            # map original mimetype to a mimetype used for "show as raw"
            # you can also provide a content-disposition to override the
            # default "attachment" disposition.
            # orig_type: (new_type, new_dispo)

            # show images inline:
            # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
            # for example render an SVG with javascript inside or even render
            # HTML.
            'image/x-icon': ('image/x-icon', 'inline'),
            'image/png': ('image/png', 'inline'),
            'image/gif': ('image/gif', 'inline'),
            'image/jpeg': ('image/jpeg', 'inline'),
            'application/pdf': ('application/pdf', 'inline'),
        }

        mimetype = file_node.mimetype
        try:
            mimetype, disposition = raw_mimetype_mapping[mimetype]
        except KeyError:
            # we don't know anything special about this, handle it safely
            if file_node.is_binary:
                # do same as download raw for binary files
                mimetype, disposition = 'application/octet-stream', 'attachment'
            else:
                # do not just use the original mimetype, but force text/plain,
                # otherwise it would serve text/html and that might be unsafe.
                # Note: underlying vcs library fakes text/plain mimetype if the
                # mimetype can not be determined and it thinks it is not
                # binary.This might lead to erroneous text display in some
                # cases, but helps in other cases, like with text files
                # without extension.
                mimetype, disposition = 'text/plain', 'inline'

        if disposition == 'attachment':
            # build the full Content-Disposition header with the filename
            disposition = self._get_attachement_headers(f_path)

        stream_content = file_node.stream_bytes()

        response = Response(app_iter=stream_content)
        response.content_disposition = disposition
        response.content_type = mimetype

        charset = self._get_default_encoding(c)
        if charset:
            response.charset = charset

        return response
869 876
870 877 @LoginRequired()
871 878 @HasRepoPermissionAnyDecorator(
872 879 'repository.read', 'repository.write', 'repository.admin')
873 880 def repo_file_download(self):
874 881 c = self.load_default_context()
875 882
876 883 commit_id, f_path = self._get_commit_and_path()
877 884 commit = self._get_commit_or_redirect(commit_id)
878 885 file_node = self._get_filenode_or_redirect(commit, f_path)
879 886
880 887 if self.request.GET.get('lf'):
881 888 # only if lf get flag is passed, we download this file
882 889 # as LFS/Largefile
883 890 lf_node = file_node.get_largefile_node()
884 891 if lf_node:
885 892 # overwrite our pointer with the REAL large-file
886 893 file_node = lf_node
887 894
888 895 disposition = self._get_attachement_headers(f_path)
889 896
890 897 stream_content = file_node.stream_bytes()
891 898
892 899 response = Response(app_iter=stream_content)
893 900 response.content_disposition = disposition
894 901 response.content_type = file_node.mimetype
895 902
896 903 charset = self._get_default_encoding(c)
897 904 if charset:
898 905 response.charset = charset
899 906
900 907 return response
901 908
902 909 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
903 910
904 911 cache_seconds = safe_int(
905 912 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
906 913 cache_on = cache_seconds > 0
907 914 log.debug(
908 915 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`'
909 916 'with caching: %s[TTL: %ss]' % (
910 917 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
911 918
912 919 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
913 920 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
914 921
915 922 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache_on)
916 923 def compute_file_search(_name_hash, _repo_id, _commit_id, _f_path):
917 924 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
918 925 _repo_id, commit_id, f_path)
919 926 try:
920 927 _d, _f = ScmModel().get_quick_filter_nodes(repo_name, _commit_id, _f_path)
921 928 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
922 929 log.exception(safe_str(e))
923 930 h.flash(safe_str(h.escape(e)), category='error')
924 931 raise HTTPFound(h.route_path(
925 932 'repo_files', repo_name=self.db_repo_name,
926 933 commit_id='tip', f_path='/'))
927 934
928 935 return _d + _f
929 936
930 937 result = compute_file_search(self.db_repo.repo_name_hash, self.db_repo.repo_id,
931 938 commit_id, f_path)
932 939 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
933 940
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_nodelist(self):
        """
        JSON endpoint returning the flat node list at the requested commit
        and path, as produced by :meth:`_get_nodelist_at_commit` (used by
        the quick file-filter UI).
        """
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)

        metadata = self._get_nodelist_at_commit(
            self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
        return {'nodes': metadata}
946 953
947 954 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
948 955 items = []
949 956 for name, commit_id in branches_or_tags.items():
950 957 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
951 958 items.append((sym_ref, name, ref_type))
952 959 return items
953 960
954 961 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
955 962 return commit_id
956 963
957 964 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
958 965 return commit_id
959 966
960 967 # NOTE(dan): old code we used in "diff" mode compare
961 968 new_f_path = vcspath.join(name, f_path)
962 969 return u'%s@%s' % (new_f_path, commit_id)
963 970
    def _get_node_history(self, commit_obj, f_path, commits=None):
        """
        get commit history for given node

        :param commit_obj: commit to calculate history
        :param f_path: path for node to calculate history for
        :param commits: if passed don't calculate history and take
            commits defined in this list
        :returns: tuple of (history, commits) where history is a list of
            grouped (id, label, type) entries for Changesets/Branches/Tags
        """
        _ = self.request.translate

        # calculate history based on tip
        tip = self.rhodecode_vcs_repo.get_commit()
        if commits is None:
            pre_load = ["author", "branch"]
            try:
                commits = tip.get_path_history(f_path, pre_load=pre_load)
            except (NodeDoesNotExistError, CommitError):
                # this node is not present at tip!
                commits = commit_obj.get_path_history(f_path, pre_load=pre_load)

        history = []
        commits_group = ([], _("Changesets"))
        for commit in commits:
            branch = ' (%s)' % commit.branch if commit.branch else ''
            n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
            commits_group[0].append((commit.raw_id, n_desc, 'sha'))
        history.append(commits_group)

        symbolic_reference = self._symbolic_reference

        if self.rhodecode_vcs_repo.alias == 'svn':
            # svn may encode branch/tag in the path itself; adjust and use
            # the svn-specific reference builder when that is detected
            adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
                f_path, self.rhodecode_vcs_repo)
            if adjusted_f_path != f_path:
                log.debug(
                    'Recognized svn tag or branch in file "%s", using svn '
                    'specific symbolic references', f_path)
                f_path = adjusted_f_path
                symbolic_reference = self._symbolic_reference_svn

        branches = self._create_references(
            self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
        branches_group = (branches, _("Branches"))

        tags = self._create_references(
            self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
        tags_group = (tags, _("Tags"))

        history.append(branches_group)
        history.append(tags_group)

        return history, commits
1017 1024
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_history(self):
        """
        JSON endpoint returning the history of a single file as select2-style
        grouped options (Changesets / Branches / Tags). Raises
        HTTPBadRequest when the path is a directory.
        """
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        if file_node.is_file():
            file_history, _hist = self._get_node_history(commit, f_path)

            res = []
            for section_items, section in file_history:
                items = []
                for obj_id, obj_text, obj_type in section_items:
                    at_rev = ''
                    # named refs carry their label as the `at_rev` marker
                    if obj_type in ['branch', 'bookmark', 'tag']:
                        at_rev = obj_text
                    entry = {
                        'id': obj_id,
                        'text': obj_text,
                        'type': obj_type,
                        'at_rev': at_rev
                    }

                    items.append(entry)

                res.append({
                    'text': section,
                    'children': items
                })

            data = {
                'more': False,
                'results': res
            }
            return data

        log.warning('Cannot fetch history for directory')
        raise HTTPBadRequest()
1060 1067
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def repo_file_authors(self):
        """
        Render the unique authors of a file with per-author commit counts;
        based either on annotation data (``?annotate=1``) or the file's
        full path history. Directories are rejected with HTTPBadRequest.
        """
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        file_node = self._get_filenode_or_redirect(commit, f_path)

        if not file_node.is_file():
            raise HTTPBadRequest()

        c.file_last_commit = file_node.last_commit
        if self.request.GET.get('annotate') == '1':
            # use _hist from annotation if annotation mode is on
            commit_ids = set(x[1] for x in file_node.annotate)
            _hist = (
                self.rhodecode_vcs_repo.get_commit(commit_id)
                for commit_id in commit_ids)
        else:
            _f_history, _hist = self._get_node_history(commit, f_path)
        c.file_author = False

        # keyed by raw author string; preserves first-seen order
        unique = collections.OrderedDict()
        for commit in _hist:
            author = commit.author
            if author not in unique:
                unique[commit.author] = [
                    h.email(author),
                    h.person(author, 'username_or_name_or_email'),
                    1 # counter
                ]

            else:
                # increase counter
                unique[commit.author][2] += 1

        c.authors = [val for val in unique.values()]

        return self._get_template_context(c)
1102 1109
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_check_head(self):
        """
        JSON pre-flight check used by the create/upload forms: reports
        whether the requested commit is a branch head and whether the
        target path already exists.
        """
        self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        new_path = self.request.POST.get('path')
        operation = self.request.POST.get('operation')
        path_exist = ''

        if new_path and operation in ['create', 'upload']:
            new_f_path = os.path.join(f_path.lstrip('/'), new_path)
            try:
                commit_obj = self.rhodecode_vcs_repo.get_commit(commit_id)
                # NOTE(dan): construct whole path without leading /
                file_node = commit_obj.get_node(new_f_path)
                if file_node is not None:
                    path_exist = new_f_path
            except EmptyRepositoryError:
                pass
            except Exception:
                # best-effort check: any lookup failure (e.g. node missing)
                # is treated as "path does not exist"
                pass

        return {
            'branch': _branch_name,
            'sha': _sha_commit_id,
            'is_head': is_head,
            'path_exists': path_exist
        }
1136 1143
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_remove_file(self):
        """
        Render the delete-file confirmation form; the actual deletion is
        performed by :meth:`repo_files_delete_file` on POST.
        """
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        # deletions are only allowed on a branch head with branch permission
        self.forbid_non_head(is_head, f_path)
        self.check_branch_permission(_branch_name)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        c.default_message = _(
            'Deleted file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path

        return self._get_template_context(c)
1160 1167
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_delete_file(self):
        """
        POST handler that commits the deletion of a single file and
        redirects to the tip commit view (with an error flash on failure).
        """
        _ = self.request.translate

        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        # deletions are only allowed on a branch head with branch permission
        self.forbid_non_head(is_head, f_path)
        self.check_branch_permission(_branch_name)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        c.default_message = _(
            'Deleted file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path
        node_path = f_path
        author = self._rhodecode_db_user.full_contact
        message = self.request.POST.get('message') or c.default_message
        try:
            nodes = {
                node_path: {
                    'content': ''
                }
            }
            ScmModel().delete_nodes(
                user=self._rhodecode_db_user.user_id, repo=self.db_repo,
                message=message,
                nodes=nodes,
                parent_commit=c.commit,
                author=author,
            )

            h.flash(
                _('Successfully deleted file `{}`').format(
                    h.escape(f_path)), category='success')
        except Exception:
            log.exception('Error during commit operation')
            h.flash(_('Error occurred during commit'), category='error')
        raise HTTPFound(
            h.route_path('repo_commit', repo_name=self.db_repo_name,
                         commit_id='tip'))
1210 1217
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_edit_file(self):
        """
        Render the edit-file form; binary files are redirected back to the
        plain file view. The commit itself is handled by
        :meth:`repo_files_update_file` on POST.
        """
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()
        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        if c.file.is_binary:
            # binary content has no in-browser editor; show the file instead
            files_url = h.route_path(
                'repo_files',
                repo_name=self.db_repo_name,
                commit_id=c.commit.raw_id, f_path=f_path)
            raise HTTPFound(files_url)

        c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path

        return self._get_template_context(c)
1240 1247
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    @CSRFRequired()
    def repo_files_update_file(self):
        """
        POST handler committing an edit of a single file (content and/or
        rename). Normalizes submitted line endings to match the file's
        first line, skips the commit when nothing changed, then commits
        via ScmModel and redirects to the resulting commit.
        """
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id)
        c.file = self._get_filenode_or_redirect(c.commit, f_path)

        if c.file.is_binary:
            # binary files cannot be edited via the web form
            raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
                            commit_id=c.commit.raw_id, f_path=f_path))

        _branch_name, _sha_commit_id, is_head = \
            self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
        c.f_path = f_path

        old_content = c.file.content
        sl = old_content.splitlines(1)
        first_line = sl[0] if sl else ''

        r_post = self.request.POST
        # line endings: 0 - Unix, 1 - Mac, 2 - DOS
        line_ending_mode = detect_mode(first_line, 0)
        # re-encode the submitted content with the detected line endings
        content = convert_line_endings(r_post.get('content', ''), line_ending_mode)

        message = r_post.get('message') or c.default_message
        org_node_path = c.file.unicode_path
        filename = r_post['filename']

        root_path = c.file.dir_path
        pure_path = self.create_pure_path(root_path, filename)
        node_path = safe_unicode(bytes(pure_path))

        default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
                                            commit_id=commit_id)
        if content == old_content and node_path == org_node_path:
            # neither content nor filename changed; nothing to commit
            h.flash(_('No changes detected on {}').format(h.escape(org_node_path)),
                    category='warning')
            raise HTTPFound(default_redirect_url)

        try:
            mapping = {
                org_node_path: {
                    'org_filename': org_node_path,
                    'filename': node_path,
                    'content': content,
                    'lexer': '',
                    'op': 'mod',
                    'mode': c.file.mode
                }
            }

            commit = ScmModel().update_nodes(
                user=self._rhodecode_db_user.user_id,
                repo=self.db_repo,
                message=message,
                nodes=mapping,
                parent_commit=c.commit,
            )

            h.flash(_('Successfully committed changes to file `{}`').format(
                h.escape(f_path)), category='success')
            # on success, redirect to the freshly created commit instead
            default_redirect_url = h.route_path(
                'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)

        except Exception:
            log.exception('Error occurred during commit')
            h.flash(_('Error occurred during commit'), category='error')

        raise HTTPFound(default_redirect_url)
1322 1329
    @LoginRequired()
    @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
    def repo_files_add_file(self):
        """
        Render the add-file form; the commit itself is handled by
        :meth:`repo_files_create_file` on POST.
        """
        _ = self.request.translate
        c = self.load_default_context()
        commit_id, f_path = self._get_commit_and_path()

        self._ensure_not_locked()

        c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
        if c.commit is None:
            c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)

        if self.rhodecode_vcs_repo.is_empty():
            # for empty repository we cannot check for current branch, we rely on
            # c.commit.branch instead
            _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
        else:
            _branch_name, _sha_commit_id, is_head = \
                self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
                                    landing_ref=self.db_repo.landing_ref_name)

        self.forbid_non_head(is_head, f_path, commit_id=commit_id)
        self.check_branch_permission(_branch_name, commit_id=commit_id)

        c.default_message = (_('Added file via RhodeCode Enterprise'))
        c.f_path = f_path.lstrip('/') # ensure not relative path

        return self._get_template_context(c)
1352 1359
1353 1360 @LoginRequired()
1354 1361 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1355 1362 @CSRFRequired()
1356 1363 def repo_files_create_file(self):
1357 1364 _ = self.request.translate
1358 1365 c = self.load_default_context()
1359 1366 commit_id, f_path = self._get_commit_and_path()
1360 1367
1361 1368 self._ensure_not_locked()
1362 1369
1363 1370 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1364 1371 if c.commit is None:
1365 1372 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1366 1373
1367 1374 # calculate redirect URL
1368 1375 if self.rhodecode_vcs_repo.is_empty():
1369 1376 default_redirect_url = h.route_path(
1370 1377 'repo_summary', repo_name=self.db_repo_name)
1371 1378 else:
1372 1379 default_redirect_url = h.route_path(
1373 1380 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1374 1381
1375 1382 if self.rhodecode_vcs_repo.is_empty():
1376 1383 # for empty repository we cannot check for current branch, we rely on
1377 1384 # c.commit.branch instead
1378 1385 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1379 1386 else:
1380 1387 _branch_name, _sha_commit_id, is_head = \
1381 1388 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1382 1389 landing_ref=self.db_repo.landing_ref_name)
1383 1390
1384 1391 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1385 1392 self.check_branch_permission(_branch_name, commit_id=commit_id)
1386 1393
1387 1394 c.default_message = (_('Added file via RhodeCode Enterprise'))
1388 1395 c.f_path = f_path
1389 1396
1390 1397 r_post = self.request.POST
1391 1398 message = r_post.get('message') or c.default_message
1392 1399 filename = r_post.get('filename')
1393 1400 unix_mode = 0
1394 1401 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1395 1402
1396 1403 if not filename:
1397 1404 # If there's no commit, redirect to repo summary
1398 1405 if type(c.commit) is EmptyCommit:
1399 1406 redirect_url = h.route_path(
1400 1407 'repo_summary', repo_name=self.db_repo_name)
1401 1408 else:
1402 1409 redirect_url = default_redirect_url
1403 1410 h.flash(_('No filename specified'), category='warning')
1404 1411 raise HTTPFound(redirect_url)
1405 1412
1406 1413 root_path = f_path
1407 1414 pure_path = self.create_pure_path(root_path, filename)
1408 1415 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1409 1416
1410 1417 author = self._rhodecode_db_user.full_contact
1411 1418 nodes = {
1412 1419 node_path: {
1413 1420 'content': content
1414 1421 }
1415 1422 }
1416 1423
1417 1424 try:
1418 1425
1419 1426 commit = ScmModel().create_nodes(
1420 1427 user=self._rhodecode_db_user.user_id,
1421 1428 repo=self.db_repo,
1422 1429 message=message,
1423 1430 nodes=nodes,
1424 1431 parent_commit=c.commit,
1425 1432 author=author,
1426 1433 )
1427 1434
1428 1435 h.flash(_('Successfully committed new file `{}`').format(
1429 1436 h.escape(node_path)), category='success')
1430 1437
1431 1438 default_redirect_url = h.route_path(
1432 1439 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1433 1440
1434 1441 except NonRelativePathError:
1435 1442 log.exception('Non Relative path found')
1436 1443 h.flash(_('The location specified must be a relative path and must not '
1437 1444 'contain .. in the path'), category='warning')
1438 1445 raise HTTPFound(default_redirect_url)
1439 1446 except (NodeError, NodeAlreadyExistsError) as e:
1440 1447 h.flash(_(h.escape(e)), category='error')
1441 1448 except Exception:
1442 1449 log.exception('Error occurred during commit')
1443 1450 h.flash(_('Error occurred during commit'), category='error')
1444 1451
1445 1452 raise HTTPFound(default_redirect_url)
1446 1453
1447 1454 @LoginRequired()
1448 1455 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1449 1456 @CSRFRequired()
1450 1457 def repo_files_upload_file(self):
1451 1458 _ = self.request.translate
1452 1459 c = self.load_default_context()
1453 1460 commit_id, f_path = self._get_commit_and_path()
1454 1461
1455 1462 self._ensure_not_locked()
1456 1463
1457 1464 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1458 1465 if c.commit is None:
1459 1466 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1460 1467
1461 1468 # calculate redirect URL
1462 1469 if self.rhodecode_vcs_repo.is_empty():
1463 1470 default_redirect_url = h.route_path(
1464 1471 'repo_summary', repo_name=self.db_repo_name)
1465 1472 else:
1466 1473 default_redirect_url = h.route_path(
1467 1474 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1468 1475
1469 1476 if self.rhodecode_vcs_repo.is_empty():
1470 1477 # for empty repository we cannot check for current branch, we rely on
1471 1478 # c.commit.branch instead
1472 1479 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1473 1480 else:
1474 1481 _branch_name, _sha_commit_id, is_head = \
1475 1482 self._is_valid_head(commit_id, self.rhodecode_vcs_repo,
1476 1483 landing_ref=self.db_repo.landing_ref_name)
1477 1484
1478 1485 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1479 1486 if error:
1480 1487 return {
1481 1488 'error': error,
1482 1489 'redirect_url': default_redirect_url
1483 1490 }
1484 1491 error = self.check_branch_permission(_branch_name, json_mode=True)
1485 1492 if error:
1486 1493 return {
1487 1494 'error': error,
1488 1495 'redirect_url': default_redirect_url
1489 1496 }
1490 1497
1491 1498 c.default_message = (_('Uploaded file via RhodeCode Enterprise'))
1492 1499 c.f_path = f_path
1493 1500
1494 1501 r_post = self.request.POST
1495 1502
1496 1503 message = c.default_message
1497 1504 user_message = r_post.getall('message')
1498 1505 if isinstance(user_message, list) and user_message:
1499 1506 # we take the first from duplicated results if it's not empty
1500 1507 message = user_message[0] if user_message[0] else message
1501 1508
1502 1509 nodes = {}
1503 1510
1504 1511 for file_obj in r_post.getall('files_upload') or []:
1505 1512 content = file_obj.file
1506 1513 filename = file_obj.filename
1507 1514
1508 1515 root_path = f_path
1509 1516 pure_path = self.create_pure_path(root_path, filename)
1510 1517 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1511 1518
1512 1519 nodes[node_path] = {
1513 1520 'content': content
1514 1521 }
1515 1522
1516 1523 if not nodes:
1517 1524 error = 'missing files'
1518 1525 return {
1519 1526 'error': error,
1520 1527 'redirect_url': default_redirect_url
1521 1528 }
1522 1529
1523 1530 author = self._rhodecode_db_user.full_contact
1524 1531
1525 1532 try:
1526 1533 commit = ScmModel().create_nodes(
1527 1534 user=self._rhodecode_db_user.user_id,
1528 1535 repo=self.db_repo,
1529 1536 message=message,
1530 1537 nodes=nodes,
1531 1538 parent_commit=c.commit,
1532 1539 author=author,
1533 1540 )
1534 1541 if len(nodes) == 1:
1535 1542 flash_message = _('Successfully committed {} new files').format(len(nodes))
1536 1543 else:
1537 1544 flash_message = _('Successfully committed 1 new file')
1538 1545
1539 1546 h.flash(flash_message, category='success')
1540 1547
1541 1548 default_redirect_url = h.route_path(
1542 1549 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1543 1550
1544 1551 except NonRelativePathError:
1545 1552 log.exception('Non Relative path found')
1546 1553 error = _('The location specified must be a relative path and must not '
1547 1554 'contain .. in the path')
1548 1555 h.flash(error, category='warning')
1549 1556
1550 1557 return {
1551 1558 'error': error,
1552 1559 'redirect_url': default_redirect_url
1553 1560 }
1554 1561 except (NodeError, NodeAlreadyExistsError) as e:
1555 1562 error = h.escape(e)
1556 1563 h.flash(error, category='error')
1557 1564
1558 1565 return {
1559 1566 'error': error,
1560 1567 'redirect_url': default_redirect_url
1561 1568 }
1562 1569 except Exception:
1563 1570 log.exception('Error occurred during commit')
1564 1571 error = _('Error occurred during commit')
1565 1572 h.flash(error, category='error')
1566 1573 return {
1567 1574 'error': error,
1568 1575 'redirect_url': default_redirect_url
1569 1576 }
1570 1577
1571 1578 return {
1572 1579 'error': None,
1573 1580 'redirect_url': default_redirect_url
1574 1581 }
@@ -1,254 +1,254 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import datetime
23 23 import formencode
24 24 import formencode.htmlfill
25 25
26 26 from pyramid.httpexceptions import HTTPFound
27 27
28 28 from pyramid.renderers import render
29 29 from pyramid.response import Response
30 30
31 31 from rhodecode import events
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33 from rhodecode.lib.auth import (
34 34 LoginRequired, HasRepoPermissionAnyDecorator, NotAnonymous,
35 35 HasRepoPermissionAny, HasPermissionAnyDecorator, CSRFRequired)
36 36 import rhodecode.lib.helpers as h
37 37 from rhodecode.lib.celerylib.utils import get_task_id
38 38 from rhodecode.model.db import coalesce, or_, Repository, RepoGroup
39 39 from rhodecode.model.permission import PermissionModel
40 40 from rhodecode.model.repo import RepoModel
41 41 from rhodecode.model.forms import RepoForkForm
42 42 from rhodecode.model.scm import ScmModel, RepoGroupList
43 43 from rhodecode.lib.utils2 import safe_int, safe_unicode
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
48 48 class RepoForksView(RepoAppView, DataGridAppView):
49 49
50 50 def load_default_context(self):
51 51 c = self._get_local_tmpl_context(include_app_defaults=True)
52 52 c.rhodecode_repo = self.rhodecode_vcs_repo
53 53
54 54 acl_groups = RepoGroupList(
55 55 RepoGroup.query().all(),
56 56 perm_set=['group.write', 'group.admin'])
57 57 c.repo_groups = RepoGroup.groups_choices(groups=acl_groups)
58 58 c.repo_groups_choices = map(lambda k: safe_unicode(k[0]), c.repo_groups)
59 59
60 60 c.personal_repo_group = c.rhodecode_user.personal_repo_group
61 61
62 62 return c
63 63
64 64 @LoginRequired()
65 65 @HasRepoPermissionAnyDecorator(
66 66 'repository.read', 'repository.write', 'repository.admin')
67 67 def repo_forks_show_all(self):
68 68 c = self.load_default_context()
69 69 return self._get_template_context(c)
70 70
71 71 @LoginRequired()
72 72 @HasRepoPermissionAnyDecorator(
73 73 'repository.read', 'repository.write', 'repository.admin')
74 74 def repo_forks_data(self):
75 75 _ = self.request.translate
76 76 self.load_default_context()
77 77 column_map = {
78 78 'fork_name': 'repo_name',
79 79 'fork_date': 'created_on',
80 80 'last_activity': 'updated_on'
81 81 }
82 82 draw, start, limit = self._extract_chunk(self.request)
83 83 search_q, order_by, order_dir = self._extract_ordering(
84 84 self.request, column_map=column_map)
85 85
86 86 acl_check = HasRepoPermissionAny(
87 87 'repository.read', 'repository.write', 'repository.admin')
88 88 repo_id = self.db_repo.repo_id
89 89 allowed_ids = [-1]
90 90 for f in Repository.query().filter(Repository.fork_id == repo_id):
91 91 if acl_check(f.repo_name, 'get forks check'):
92 92 allowed_ids.append(f.repo_id)
93 93
94 94 forks_data_total_count = Repository.query()\
95 95 .filter(Repository.fork_id == repo_id)\
96 96 .filter(Repository.repo_id.in_(allowed_ids))\
97 97 .count()
98 98
99 99 # json generate
100 100 base_q = Repository.query()\
101 101 .filter(Repository.fork_id == repo_id)\
102 102 .filter(Repository.repo_id.in_(allowed_ids))\
103 103
104 104 if search_q:
105 105 like_expression = u'%{}%'.format(safe_unicode(search_q))
106 106 base_q = base_q.filter(or_(
107 107 Repository.repo_name.ilike(like_expression),
108 108 Repository.description.ilike(like_expression),
109 109 ))
110 110
111 111 forks_data_total_filtered_count = base_q.count()
112 112
113 113 sort_col = getattr(Repository, order_by, None)
114 114 if sort_col:
115 115 if order_dir == 'asc':
116 116 # handle null values properly to order by NULL last
117 117 if order_by in ['last_activity']:
118 118 sort_col = coalesce(sort_col, datetime.date.max)
119 119 sort_col = sort_col.asc()
120 120 else:
121 121 # handle null values properly to order by NULL last
122 122 if order_by in ['last_activity']:
123 123 sort_col = coalesce(sort_col, datetime.date.min)
124 124 sort_col = sort_col.desc()
125 125
126 126 base_q = base_q.order_by(sort_col)
127 127 base_q = base_q.offset(start).limit(limit)
128 128
129 129 fork_list = base_q.all()
130 130
131 131 def fork_actions(fork):
132 132 url_link = h.route_path(
133 133 'repo_compare',
134 134 repo_name=fork.repo_name,
135 135 source_ref_type=self.db_repo.landing_ref_type,
136 136 source_ref=self.db_repo.landing_ref_name,
137 137 target_ref_type=self.db_repo.landing_ref_type,
138 138 target_ref=self.db_repo.landing_ref_name,
139 139 _query=dict(merge=1, target_repo=f.repo_name))
140 140 return h.link_to(_('Compare fork'), url_link, class_='btn-link')
141 141
142 142 def fork_name(fork):
143 143 return h.link_to(fork.repo_name,
144 144 h.route_path('repo_summary', repo_name=fork.repo_name))
145 145
146 146 forks_data = []
147 147 for fork in fork_list:
148 148 forks_data.append({
149 149 "username": h.gravatar_with_user(self.request, fork.user.username),
150 150 "fork_name": fork_name(fork),
151 151 "description": fork.description_safe,
152 152 "fork_date": h.age_component(fork.created_on, time_is_local=True),
153 153 "last_activity": h.format_date(fork.updated_on),
154 154 "action": fork_actions(fork),
155 155 })
156 156
157 157 data = ({
158 158 'draw': draw,
159 159 'data': forks_data,
160 160 'recordsTotal': forks_data_total_count,
161 161 'recordsFiltered': forks_data_total_filtered_count,
162 162 })
163 163
164 164 return data
165 165
166 166 @LoginRequired()
167 167 @NotAnonymous()
168 @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
168 @HasPermissionAnyDecorator('hg.admin', PermissionModel.FORKING_ENABLED)
169 169 @HasRepoPermissionAnyDecorator(
170 170 'repository.read', 'repository.write', 'repository.admin')
171 171 def repo_fork_new(self):
172 172 c = self.load_default_context()
173 173
174 174 defaults = RepoModel()._get_defaults(self.db_repo_name)
175 175 # alter the description to indicate a fork
176 176 defaults['description'] = (
177 177 'fork of repository: %s \n%s' % (
178 178 defaults['repo_name'], defaults['description']))
179 179 # add suffix to fork
180 180 defaults['repo_name'] = '%s-fork' % defaults['repo_name']
181 181
182 182 data = render('rhodecode:templates/forks/fork.mako',
183 183 self._get_template_context(c), self.request)
184 184 html = formencode.htmlfill.render(
185 185 data,
186 186 defaults=defaults,
187 187 encoding="UTF-8",
188 188 force_defaults=False
189 189 )
190 190 return Response(html)
191 191
192 192 @LoginRequired()
193 193 @NotAnonymous()
194 @HasPermissionAnyDecorator('hg.admin', 'hg.fork.repository')
194 @HasPermissionAnyDecorator('hg.admin', PermissionModel.FORKING_ENABLED)
195 195 @HasRepoPermissionAnyDecorator(
196 196 'repository.read', 'repository.write', 'repository.admin')
197 197 @CSRFRequired()
198 198 def repo_fork_create(self):
199 199 _ = self.request.translate
200 200 c = self.load_default_context()
201 201
202 202 _form = RepoForkForm(self.request.translate,
203 203 old_data={'repo_type': self.db_repo.repo_type},
204 204 repo_groups=c.repo_groups_choices)()
205 205 post_data = dict(self.request.POST)
206 206
207 207 # forbid injecting other repo by forging a request
208 208 post_data['fork_parent_id'] = self.db_repo.repo_id
209 209 post_data['landing_rev'] = self.db_repo._landing_revision
210 210
211 211 form_result = {}
212 212 task_id = None
213 213 try:
214 214 form_result = _form.to_python(post_data)
215 215 copy_permissions = form_result.get('copy_permissions')
216 216 # create fork is done sometimes async on celery, db transaction
217 217 # management is handled there.
218 218 task = RepoModel().create_fork(
219 219 form_result, c.rhodecode_user.user_id)
220 220
221 221 task_id = get_task_id(task)
222 222 except formencode.Invalid as errors:
223 223 c.rhodecode_db_repo = self.db_repo
224 224
225 225 data = render('rhodecode:templates/forks/fork.mako',
226 226 self._get_template_context(c), self.request)
227 227 html = formencode.htmlfill.render(
228 228 data,
229 229 defaults=errors.value,
230 230 errors=errors.error_dict or {},
231 231 prefix_error=False,
232 232 encoding="UTF-8",
233 233 force_defaults=False
234 234 )
235 235 return Response(html)
236 236 except Exception:
237 237 log.exception(
238 238 u'Exception while trying to fork the repository %s', self.db_repo_name)
239 239 msg = _('An error occurred during repository forking %s') % (self.db_repo_name, )
240 240 h.flash(msg, category='error')
241 241 raise HTTPFound(h.route_path('home'))
242 242
243 243 repo_name = form_result.get('repo_name_full', self.db_repo_name)
244 244
245 245 affected_user_ids = [self._rhodecode_user.user_id]
246 246 if copy_permissions:
247 247 # permission flush is done in repo creating
248 248 pass
249 249
250 250 PermissionModel().trigger_permission_flush(affected_user_ids)
251 251
252 252 raise HTTPFound(
253 253 h.route_path('repo_creating', repo_name=repo_name,
254 254 _query=dict(task_id=task_id)))
@@ -1,1857 +1,1861 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import collections
23 23
24 24 import formencode
25 25 import formencode.htmlfill
26 26 import peppercorn
27 27 from pyramid.httpexceptions import (
28 28 HTTPFound, HTTPNotFound, HTTPForbidden, HTTPBadRequest, HTTPConflict)
29 29
30 30 from pyramid.renderers import render
31 31
32 32 from rhodecode.apps._base import RepoAppView, DataGridAppView
33 33
34 34 from rhodecode.lib import helpers as h, diffs, codeblocks, channelstream
35 35 from rhodecode.lib.base import vcs_operation_context
36 36 from rhodecode.lib.diffs import load_cached_diff, cache_diff, diff_cache_exist
37 37 from rhodecode.lib.exceptions import CommentVersionMismatch
38 38 from rhodecode.lib.ext_json import json
39 39 from rhodecode.lib.auth import (
40 40 LoginRequired, HasRepoPermissionAny, HasRepoPermissionAnyDecorator,
41 41 NotAnonymous, CSRFRequired)
42 42 from rhodecode.lib.utils2 import str2bool, safe_str, safe_unicode, safe_int, aslist
43 43 from rhodecode.lib.vcs.backends.base import (
44 44 EmptyCommit, UpdateFailureReason, unicode_to_reference)
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitDoesNotExistError, RepositoryRequirementError, EmptyRepositoryError)
47 47 from rhodecode.model.changeset_status import ChangesetStatusModel
48 48 from rhodecode.model.comment import CommentsModel
49 49 from rhodecode.model.db import (
50 50 func, false, or_, PullRequest, ChangesetComment, ChangesetStatus, Repository,
51 51 PullRequestReviewers)
52 52 from rhodecode.model.forms import PullRequestForm
53 53 from rhodecode.model.meta import Session
54 54 from rhodecode.model.pull_request import PullRequestModel, MergeCheck
55 55 from rhodecode.model.scm import ScmModel
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class RepoPullRequestsView(RepoAppView, DataGridAppView):
61 61
62 62 def load_default_context(self):
63 63 c = self._get_local_tmpl_context(include_app_defaults=True)
64 64 c.REVIEW_STATUS_APPROVED = ChangesetStatus.STATUS_APPROVED
65 65 c.REVIEW_STATUS_REJECTED = ChangesetStatus.STATUS_REJECTED
66 66 # backward compat., we use for OLD PRs a plain renderer
67 67 c.renderer = 'plain'
68 68 return c
69 69
70 70 def _get_pull_requests_list(
71 71 self, repo_name, source, filter_type, opened_by, statuses):
72 72
73 73 draw, start, limit = self._extract_chunk(self.request)
74 74 search_q, order_by, order_dir = self._extract_ordering(self.request)
75 75 _render = self.request.get_partial_renderer(
76 76 'rhodecode:templates/data_table/_dt_elements.mako')
77 77
78 78 # pagination
79 79
80 80 if filter_type == 'awaiting_review':
81 81 pull_requests = PullRequestModel().get_awaiting_review(
82 82 repo_name, search_q=search_q, source=source, opened_by=opened_by,
83 83 statuses=statuses, offset=start, length=limit,
84 84 order_by=order_by, order_dir=order_dir)
85 85 pull_requests_total_count = PullRequestModel().count_awaiting_review(
86 86 repo_name, search_q=search_q, source=source, statuses=statuses,
87 87 opened_by=opened_by)
88 88 elif filter_type == 'awaiting_my_review':
89 89 pull_requests = PullRequestModel().get_awaiting_my_review(
90 90 repo_name, search_q=search_q, source=source, opened_by=opened_by,
91 91 user_id=self._rhodecode_user.user_id, statuses=statuses,
92 92 offset=start, length=limit, order_by=order_by,
93 93 order_dir=order_dir)
94 94 pull_requests_total_count = PullRequestModel().count_awaiting_my_review(
95 95 repo_name, search_q=search_q, source=source, user_id=self._rhodecode_user.user_id,
96 96 statuses=statuses, opened_by=opened_by)
97 97 else:
98 98 pull_requests = PullRequestModel().get_all(
99 99 repo_name, search_q=search_q, source=source, opened_by=opened_by,
100 100 statuses=statuses, offset=start, length=limit,
101 101 order_by=order_by, order_dir=order_dir)
102 102 pull_requests_total_count = PullRequestModel().count_all(
103 103 repo_name, search_q=search_q, source=source, statuses=statuses,
104 104 opened_by=opened_by)
105 105
106 106 data = []
107 107 comments_model = CommentsModel()
108 108 for pr in pull_requests:
109 109 comments_count = comments_model.get_all_comments(
110 110 self.db_repo.repo_id, pull_request=pr,
111 111 include_drafts=False, count_only=True)
112 112
113 113 data.append({
114 114 'name': _render('pullrequest_name',
115 115 pr.pull_request_id, pr.pull_request_state,
116 116 pr.work_in_progress, pr.target_repo.repo_name,
117 117 short=True),
118 118 'name_raw': pr.pull_request_id,
119 119 'status': _render('pullrequest_status',
120 120 pr.calculated_review_status()),
121 121 'title': _render('pullrequest_title', pr.title, pr.description),
122 122 'description': h.escape(pr.description),
123 123 'updated_on': _render('pullrequest_updated_on',
124 124 h.datetime_to_time(pr.updated_on),
125 125 pr.versions_count),
126 126 'updated_on_raw': h.datetime_to_time(pr.updated_on),
127 127 'created_on': _render('pullrequest_updated_on',
128 128 h.datetime_to_time(pr.created_on)),
129 129 'created_on_raw': h.datetime_to_time(pr.created_on),
130 130 'state': pr.pull_request_state,
131 131 'author': _render('pullrequest_author',
132 132 pr.author.full_contact, ),
133 133 'author_raw': pr.author.full_name,
134 134 'comments': _render('pullrequest_comments', comments_count),
135 135 'comments_raw': comments_count,
136 136 'closed': pr.is_closed(),
137 137 })
138 138
139 139 data = ({
140 140 'draw': draw,
141 141 'data': data,
142 142 'recordsTotal': pull_requests_total_count,
143 143 'recordsFiltered': pull_requests_total_count,
144 144 })
145 145 return data
146 146
147 147 @LoginRequired()
148 148 @HasRepoPermissionAnyDecorator(
149 149 'repository.read', 'repository.write', 'repository.admin')
150 150 def pull_request_list(self):
151 151 c = self.load_default_context()
152 152
153 153 req_get = self.request.GET
154 154 c.source = str2bool(req_get.get('source'))
155 155 c.closed = str2bool(req_get.get('closed'))
156 156 c.my = str2bool(req_get.get('my'))
157 157 c.awaiting_review = str2bool(req_get.get('awaiting_review'))
158 158 c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
159 159
160 160 c.active = 'open'
161 161 if c.my:
162 162 c.active = 'my'
163 163 if c.closed:
164 164 c.active = 'closed'
165 165 if c.awaiting_review and not c.source:
166 166 c.active = 'awaiting'
167 167 if c.source and not c.awaiting_review:
168 168 c.active = 'source'
169 169 if c.awaiting_my_review:
170 170 c.active = 'awaiting_my'
171 171
172 172 return self._get_template_context(c)
173 173
174 174 @LoginRequired()
175 175 @HasRepoPermissionAnyDecorator(
176 176 'repository.read', 'repository.write', 'repository.admin')
177 177 def pull_request_list_data(self):
178 178 self.load_default_context()
179 179
180 180 # additional filters
181 181 req_get = self.request.GET
182 182 source = str2bool(req_get.get('source'))
183 183 closed = str2bool(req_get.get('closed'))
184 184 my = str2bool(req_get.get('my'))
185 185 awaiting_review = str2bool(req_get.get('awaiting_review'))
186 186 awaiting_my_review = str2bool(req_get.get('awaiting_my_review'))
187 187
188 188 filter_type = 'awaiting_review' if awaiting_review \
189 189 else 'awaiting_my_review' if awaiting_my_review \
190 190 else None
191 191
192 192 opened_by = None
193 193 if my:
194 194 opened_by = [self._rhodecode_user.user_id]
195 195
196 196 statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN]
197 197 if closed:
198 198 statuses = [PullRequest.STATUS_CLOSED]
199 199
200 200 data = self._get_pull_requests_list(
201 201 repo_name=self.db_repo_name, source=source,
202 202 filter_type=filter_type, opened_by=opened_by, statuses=statuses)
203 203
204 204 return data
205 205
206 206 def _is_diff_cache_enabled(self, target_repo):
207 207 caching_enabled = self._get_general_setting(
208 208 target_repo, 'rhodecode_diff_cache')
209 209 log.debug('Diff caching enabled: %s', caching_enabled)
210 210 return caching_enabled
211 211
212 212 def _get_diffset(self, source_repo_name, source_repo,
213 213 ancestor_commit,
214 214 source_ref_id, target_ref_id,
215 215 target_commit, source_commit, diff_limit, file_limit,
216 216 fulldiff, hide_whitespace_changes, diff_context, use_ancestor=True):
217 217
218 218 target_commit_final = target_commit
219 219 source_commit_final = source_commit
220 220
221 221 if use_ancestor:
222 222 # we might want to not use it for versions
223 223 target_ref_id = ancestor_commit.raw_id
224 224 target_commit_final = ancestor_commit
225 225
226 226 vcs_diff = PullRequestModel().get_diff(
227 227 source_repo, source_ref_id, target_ref_id,
228 228 hide_whitespace_changes, diff_context)
229 229
230 230 diff_processor = diffs.DiffProcessor(
231 231 vcs_diff, format='newdiff', diff_limit=diff_limit,
232 232 file_limit=file_limit, show_full_diff=fulldiff)
233 233
234 234 _parsed = diff_processor.prepare()
235 235
236 236 diffset = codeblocks.DiffSet(
237 237 repo_name=self.db_repo_name,
238 238 source_repo_name=source_repo_name,
239 239 source_node_getter=codeblocks.diffset_node_getter(target_commit_final),
240 240 target_node_getter=codeblocks.diffset_node_getter(source_commit_final),
241 241 )
242 242 diffset = self.path_filter.render_patchset_filtered(
243 243 diffset, _parsed, target_ref_id, source_ref_id)
244 244
245 245 return diffset
246 246
247 247 def _get_range_diffset(self, source_scm, source_repo,
248 248 commit1, commit2, diff_limit, file_limit,
249 249 fulldiff, hide_whitespace_changes, diff_context):
250 250 vcs_diff = source_scm.get_diff(
251 251 commit1, commit2,
252 252 ignore_whitespace=hide_whitespace_changes,
253 253 context=diff_context)
254 254
255 255 diff_processor = diffs.DiffProcessor(
256 256 vcs_diff, format='newdiff', diff_limit=diff_limit,
257 257 file_limit=file_limit, show_full_diff=fulldiff)
258 258
259 259 _parsed = diff_processor.prepare()
260 260
261 261 diffset = codeblocks.DiffSet(
262 262 repo_name=source_repo.repo_name,
263 263 source_node_getter=codeblocks.diffset_node_getter(commit1),
264 264 target_node_getter=codeblocks.diffset_node_getter(commit2))
265 265
266 266 diffset = self.path_filter.render_patchset_filtered(
267 267 diffset, _parsed, commit1.raw_id, commit2.raw_id)
268 268
269 269 return diffset
270 270
271 271 def register_comments_vars(self, c, pull_request, versions, include_drafts=True):
272 272 comments_model = CommentsModel()
273 273
274 274 # GENERAL COMMENTS with versions #
275 275 q = comments_model._all_general_comments_of_pull_request(pull_request)
276 276 q = q.order_by(ChangesetComment.comment_id.asc())
277 277 if not include_drafts:
278 278 q = q.filter(ChangesetComment.draft == false())
279 279 general_comments = q
280 280
281 281 # pick comments we want to render at current version
282 282 c.comment_versions = comments_model.aggregate_comments(
283 283 general_comments, versions, c.at_version_num)
284 284
285 285 # INLINE COMMENTS with versions #
286 286 q = comments_model._all_inline_comments_of_pull_request(pull_request)
287 287 q = q.order_by(ChangesetComment.comment_id.asc())
288 288 if not include_drafts:
289 289 q = q.filter(ChangesetComment.draft == false())
290 290 inline_comments = q
291 291
292 292 c.inline_versions = comments_model.aggregate_comments(
293 293 inline_comments, versions, c.at_version_num, inline=True)
294 294
295 295 # Comments inline+general
296 296 if c.at_version:
297 297 c.inline_comments_flat = c.inline_versions[c.at_version_num]['display']
298 298 c.comments = c.comment_versions[c.at_version_num]['display']
299 299 else:
300 300 c.inline_comments_flat = c.inline_versions[c.at_version_num]['until']
301 301 c.comments = c.comment_versions[c.at_version_num]['until']
302 302
303 303 return general_comments, inline_comments
304 304
305 305 @LoginRequired()
306 306 @HasRepoPermissionAnyDecorator(
307 307 'repository.read', 'repository.write', 'repository.admin')
308 308 def pull_request_show(self):
309 309 _ = self.request.translate
310 310 c = self.load_default_context()
311 311
312 312 pull_request = PullRequest.get_or_404(
313 313 self.request.matchdict['pull_request_id'])
314 314 pull_request_id = pull_request.pull_request_id
315 315
316 316 c.state_progressing = pull_request.is_state_changing()
317 317 c.pr_broadcast_channel = channelstream.pr_channel(pull_request)
318 318
319 319 _new_state = {
320 320 'created': PullRequest.STATE_CREATED,
321 321 }.get(self.request.GET.get('force_state'))
322 322
323 323 if c.is_super_admin and _new_state:
324 324 with pull_request.set_state(PullRequest.STATE_UPDATING, final_state=_new_state):
325 325 h.flash(
326 326 _('Pull Request state was force changed to `{}`').format(_new_state),
327 327 category='success')
328 328 Session().commit()
329 329
330 330 raise HTTPFound(h.route_path(
331 331 'pullrequest_show', repo_name=self.db_repo_name,
332 332 pull_request_id=pull_request_id))
333 333
334 334 version = self.request.GET.get('version')
335 335 from_version = self.request.GET.get('from_version') or version
336 336 merge_checks = self.request.GET.get('merge_checks')
337 337 c.fulldiff = str2bool(self.request.GET.get('fulldiff'))
338 338 force_refresh = str2bool(self.request.GET.get('force_refresh'))
339 339 c.range_diff_on = self.request.GET.get('range-diff') == "1"
340 340
341 341 # fetch global flags of ignore ws or context lines
342 342 diff_context = diffs.get_diff_context(self.request)
343 343 hide_whitespace_changes = diffs.get_diff_whitespace_flag(self.request)
344 344
345 345 (pull_request_latest,
346 346 pull_request_at_ver,
347 347 pull_request_display_obj,
348 348 at_version) = PullRequestModel().get_pr_version(
349 349 pull_request_id, version=version)
350 350
351 351 pr_closed = pull_request_latest.is_closed()
352 352
353 353 if pr_closed and (version or from_version):
354 354 # not allow to browse versions for closed PR
355 355 raise HTTPFound(h.route_path(
356 356 'pullrequest_show', repo_name=self.db_repo_name,
357 357 pull_request_id=pull_request_id))
358 358
359 359 versions = pull_request_display_obj.versions()
360 360
361 361 c.commit_versions = PullRequestModel().pr_commits_versions(versions)
362 362
363 363 # used to store per-commit range diffs
364 364 c.changes = collections.OrderedDict()
365 365
366 366 c.at_version = at_version
367 367 c.at_version_num = (at_version
368 368 if at_version and at_version != PullRequest.LATEST_VER
369 369 else None)
370 370
371 371 c.at_version_index = ChangesetComment.get_index_from_version(
372 372 c.at_version_num, versions)
373 373
374 374 (prev_pull_request_latest,
375 375 prev_pull_request_at_ver,
376 376 prev_pull_request_display_obj,
377 377 prev_at_version) = PullRequestModel().get_pr_version(
378 378 pull_request_id, version=from_version)
379 379
380 380 c.from_version = prev_at_version
381 381 c.from_version_num = (prev_at_version
382 382 if prev_at_version and prev_at_version != PullRequest.LATEST_VER
383 383 else None)
384 384 c.from_version_index = ChangesetComment.get_index_from_version(
385 385 c.from_version_num, versions)
386 386
387 387 # define if we're in COMPARE mode or VIEW at version mode
388 388 compare = at_version != prev_at_version
389 389
390 390 # pull_requests repo_name we opened it against
391 391 # ie. target_repo must match
392 392 if self.db_repo_name != pull_request_at_ver.target_repo.repo_name:
393 393 log.warning('Mismatch between the current repo: %s, and target %s',
394 394 self.db_repo_name, pull_request_at_ver.target_repo.repo_name)
395 395 raise HTTPNotFound()
396 396
397 397 c.shadow_clone_url = PullRequestModel().get_shadow_clone_url(pull_request_at_ver)
398 398
399 399 c.pull_request = pull_request_display_obj
400 400 c.renderer = pull_request_at_ver.description_renderer or c.renderer
401 401 c.pull_request_latest = pull_request_latest
402 402
403 403 # inject latest version
404 404 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
405 405 c.versions = versions + [latest_ver]
406 406
407 407 if compare or (at_version and not at_version == PullRequest.LATEST_VER):
408 408 c.allowed_to_change_status = False
409 409 c.allowed_to_update = False
410 410 c.allowed_to_merge = False
411 411 c.allowed_to_delete = False
412 412 c.allowed_to_comment = False
413 413 c.allowed_to_close = False
414 414 else:
415 415 can_change_status = PullRequestModel().check_user_change_status(
416 416 pull_request_at_ver, self._rhodecode_user)
417 417 c.allowed_to_change_status = can_change_status and not pr_closed
418 418
419 419 c.allowed_to_update = PullRequestModel().check_user_update(
420 420 pull_request_latest, self._rhodecode_user) and not pr_closed
421 421 c.allowed_to_merge = PullRequestModel().check_user_merge(
422 422 pull_request_latest, self._rhodecode_user) and not pr_closed
423 423 c.allowed_to_delete = PullRequestModel().check_user_delete(
424 424 pull_request_latest, self._rhodecode_user) and not pr_closed
425 425 c.allowed_to_comment = not pr_closed
426 426 c.allowed_to_close = c.allowed_to_merge and not pr_closed
427 427
428 428 c.forbid_adding_reviewers = False
429 429
430 430 if pull_request_latest.reviewer_data and \
431 431 'rules' in pull_request_latest.reviewer_data:
432 432 rules = pull_request_latest.reviewer_data['rules'] or {}
433 433 try:
434 434 c.forbid_adding_reviewers = rules.get('forbid_adding_reviewers')
435 435 except Exception:
436 436 pass
437 437
438 438 # check merge capabilities
439 439 _merge_check = MergeCheck.validate(
440 440 pull_request_latest, auth_user=self._rhodecode_user,
441 441 translator=self.request.translate,
442 442 force_shadow_repo_refresh=force_refresh)
443 443
444 444 c.pr_merge_errors = _merge_check.error_details
445 445 c.pr_merge_possible = not _merge_check.failed
446 446 c.pr_merge_message = _merge_check.merge_msg
447 447 c.pr_merge_source_commit = _merge_check.source_commit
448 448 c.pr_merge_target_commit = _merge_check.target_commit
449 449
450 450 c.pr_merge_info = MergeCheck.get_merge_conditions(
451 451 pull_request_latest, translator=self.request.translate)
452 452
453 453 c.pull_request_review_status = _merge_check.review_status
454 454 if merge_checks:
455 455 self.request.override_renderer = \
456 456 'rhodecode:templates/pullrequests/pullrequest_merge_checks.mako'
457 457 return self._get_template_context(c)
458 458
459 459 c.reviewers_count = pull_request.reviewers_count
460 460 c.observers_count = pull_request.observers_count
461 461
462 462 # reviewers and statuses
463 463 c.pull_request_default_reviewers_data_json = json.dumps(pull_request.reviewer_data)
464 464 c.pull_request_set_reviewers_data_json = collections.OrderedDict({'reviewers': []})
465 465 c.pull_request_set_observers_data_json = collections.OrderedDict({'observers': []})
466 466
467 467 for review_obj, member, reasons, mandatory, status in pull_request_at_ver.reviewers_statuses():
468 468 member_reviewer = h.reviewer_as_json(
469 469 member, reasons=reasons, mandatory=mandatory,
470 470 role=review_obj.role,
471 471 user_group=review_obj.rule_user_group_data()
472 472 )
473 473
474 474 current_review_status = status[0][1].status if status else ChangesetStatus.STATUS_NOT_REVIEWED
475 475 member_reviewer['review_status'] = current_review_status
476 476 member_reviewer['review_status_label'] = h.commit_status_lbl(current_review_status)
477 477 member_reviewer['allowed_to_update'] = c.allowed_to_update
478 478 c.pull_request_set_reviewers_data_json['reviewers'].append(member_reviewer)
479 479
480 480 c.pull_request_set_reviewers_data_json = json.dumps(c.pull_request_set_reviewers_data_json)
481 481
482 482 for observer_obj, member in pull_request_at_ver.observers():
483 483 member_observer = h.reviewer_as_json(
484 484 member, reasons=[], mandatory=False,
485 485 role=observer_obj.role,
486 486 user_group=observer_obj.rule_user_group_data()
487 487 )
488 488 member_observer['allowed_to_update'] = c.allowed_to_update
489 489 c.pull_request_set_observers_data_json['observers'].append(member_observer)
490 490
491 491 c.pull_request_set_observers_data_json = json.dumps(c.pull_request_set_observers_data_json)
492 492
493 493 general_comments, inline_comments = \
494 494 self.register_comments_vars(c, pull_request_latest, versions)
495 495
496 496 # TODOs
497 497 c.unresolved_comments = CommentsModel() \
498 498 .get_pull_request_unresolved_todos(pull_request_latest)
499 499 c.resolved_comments = CommentsModel() \
500 500 .get_pull_request_resolved_todos(pull_request_latest)
501 501
502 502 # Drafts
503 503 c.draft_comments = CommentsModel().get_pull_request_drafts(
504 504 self._rhodecode_db_user.user_id,
505 505 pull_request_latest)
506 506
507 507 # if we use version, then do not show later comments
508 508 # than current version
509 509 display_inline_comments = collections.defaultdict(
510 510 lambda: collections.defaultdict(list))
511 511 for co in inline_comments:
512 512 if c.at_version_num:
513 513 # pick comments that are at least UPTO given version, so we
514 514 # don't render comments for higher version
515 515 should_render = co.pull_request_version_id and \
516 516 co.pull_request_version_id <= c.at_version_num
517 517 else:
518 518 # showing all, for 'latest'
519 519 should_render = True
520 520
521 521 if should_render:
522 522 display_inline_comments[co.f_path][co.line_no].append(co)
523 523
524 524 # load diff data into template context, if we use compare mode then
525 525 # diff is calculated based on changes between versions of PR
526 526
527 527 source_repo = pull_request_at_ver.source_repo
528 528 source_ref_id = pull_request_at_ver.source_ref_parts.commit_id
529 529
530 530 target_repo = pull_request_at_ver.target_repo
531 531 target_ref_id = pull_request_at_ver.target_ref_parts.commit_id
532 532
533 533 if compare:
534 534 # in compare switch the diff base to latest commit from prev version
535 535 target_ref_id = prev_pull_request_display_obj.revisions[0]
536 536
537 537 # despite opening commits for bookmarks/branches/tags, we always
538 538 # convert this to rev to prevent changes after bookmark or branch change
539 539 c.source_ref_type = 'rev'
540 540 c.source_ref = source_ref_id
541 541
542 542 c.target_ref_type = 'rev'
543 543 c.target_ref = target_ref_id
544 544
545 545 c.source_repo = source_repo
546 546 c.target_repo = target_repo
547 547
548 548 c.commit_ranges = []
549 549 source_commit = EmptyCommit()
550 550 target_commit = EmptyCommit()
551 551 c.missing_requirements = False
552 552
553 553 source_scm = source_repo.scm_instance()
554 554 target_scm = target_repo.scm_instance()
555 555
556 556 shadow_scm = None
557 557 try:
558 558 shadow_scm = pull_request_latest.get_shadow_repo()
559 559 except Exception:
560 560 log.debug('Failed to get shadow repo', exc_info=True)
561 561 # try first the existing source_repo, and then shadow
562 562 # repo if we can obtain one
563 563 commits_source_repo = source_scm
564 564 if shadow_scm:
565 565 commits_source_repo = shadow_scm
566 566
567 567 c.commits_source_repo = commits_source_repo
568 568 c.ancestor = None # set it to None, to hide it from PR view
569 569
570 570 # empty version means latest, so we keep this to prevent
571 571 # double caching
572 572 version_normalized = version or PullRequest.LATEST_VER
573 573 from_version_normalized = from_version or PullRequest.LATEST_VER
574 574
575 575 cache_path = self.rhodecode_vcs_repo.get_create_shadow_cache_pr_path(target_repo)
576 576 cache_file_path = diff_cache_exist(
577 577 cache_path, 'pull_request', pull_request_id, version_normalized,
578 578 from_version_normalized, source_ref_id, target_ref_id,
579 579 hide_whitespace_changes, diff_context, c.fulldiff)
580 580
581 581 caching_enabled = self._is_diff_cache_enabled(c.target_repo)
582 582 force_recache = self.get_recache_flag()
583 583
584 584 cached_diff = None
585 585 if caching_enabled:
586 586 cached_diff = load_cached_diff(cache_file_path)
587 587
588 588 has_proper_commit_cache = (
589 589 cached_diff and cached_diff.get('commits')
590 590 and len(cached_diff.get('commits', [])) == 5
591 591 and cached_diff.get('commits')[0]
592 592 and cached_diff.get('commits')[3])
593 593
594 594 if not force_recache and not c.range_diff_on and has_proper_commit_cache:
595 595 diff_commit_cache = \
596 596 (ancestor_commit, commit_cache, missing_requirements,
597 597 source_commit, target_commit) = cached_diff['commits']
598 598 else:
599 599 # NOTE(marcink): we reach potentially unreachable errors when a PR has
600 600 # merge errors resulting in potentially hidden commits in the shadow repo.
601 601 maybe_unreachable = _merge_check.MERGE_CHECK in _merge_check.error_details \
602 602 and _merge_check.merge_response
603 603 maybe_unreachable = maybe_unreachable \
604 604 and _merge_check.merge_response.metadata.get('unresolved_files')
605 605 log.debug("Using unreachable commits due to MERGE_CHECK in merge simulation")
606 606 diff_commit_cache = \
607 607 (ancestor_commit, commit_cache, missing_requirements,
608 608 source_commit, target_commit) = self.get_commits(
609 609 commits_source_repo,
610 610 pull_request_at_ver,
611 611 source_commit,
612 612 source_ref_id,
613 613 source_scm,
614 614 target_commit,
615 615 target_ref_id,
616 616 target_scm,
617 617 maybe_unreachable=maybe_unreachable)
618 618
619 619 # register our commit range
620 620 for comm in commit_cache.values():
621 621 c.commit_ranges.append(comm)
622 622
623 623 c.missing_requirements = missing_requirements
624 624 c.ancestor_commit = ancestor_commit
625 625 c.statuses = source_repo.statuses(
626 626 [x.raw_id for x in c.commit_ranges])
627 627
628 628 # auto collapse if we have more than limit
629 629 collapse_limit = diffs.DiffProcessor._collapse_commits_over
630 630 c.collapse_all_commits = len(c.commit_ranges) > collapse_limit
631 631 c.compare_mode = compare
632 632
633 633 # diff_limit is the old behavior, will cut off the whole diff
634 634 # if the limit is applied otherwise will just hide the
635 635 # big files from the front-end
636 636 diff_limit = c.visual.cut_off_limit_diff
637 637 file_limit = c.visual.cut_off_limit_file
638 638
639 639 c.missing_commits = False
640 640 if (c.missing_requirements
641 641 or isinstance(source_commit, EmptyCommit)
642 642 or source_commit == target_commit):
643 643
644 644 c.missing_commits = True
645 645 else:
646 646 c.inline_comments = display_inline_comments
647 647
648 648 use_ancestor = True
649 649 if from_version_normalized != version_normalized:
650 650 use_ancestor = False
651 651
652 652 has_proper_diff_cache = cached_diff and cached_diff.get('commits')
653 653 if not force_recache and has_proper_diff_cache:
654 654 c.diffset = cached_diff['diff']
655 655 else:
656 656 try:
657 657 c.diffset = self._get_diffset(
658 658 c.source_repo.repo_name, commits_source_repo,
659 659 c.ancestor_commit,
660 660 source_ref_id, target_ref_id,
661 661 target_commit, source_commit,
662 662 diff_limit, file_limit, c.fulldiff,
663 663 hide_whitespace_changes, diff_context,
664 664 use_ancestor=use_ancestor
665 665 )
666 666
667 667 # save cached diff
668 668 if caching_enabled:
669 669 cache_diff(cache_file_path, c.diffset, diff_commit_cache)
670 670 except CommitDoesNotExistError:
671 671 log.exception('Failed to generate diffset')
672 672 c.missing_commits = True
673 673
674 674 if not c.missing_commits:
675 675
676 676 c.limited_diff = c.diffset.limited_diff
677 677
678 678 # calculate removed files that are bound to comments
679 679 comment_deleted_files = [
680 680 fname for fname in display_inline_comments
681 681 if fname not in c.diffset.file_stats]
682 682
683 683 c.deleted_files_comments = collections.defaultdict(dict)
684 684 for fname, per_line_comments in display_inline_comments.items():
685 685 if fname in comment_deleted_files:
686 686 c.deleted_files_comments[fname]['stats'] = 0
687 687 c.deleted_files_comments[fname]['comments'] = list()
688 688 for lno, comments in per_line_comments.items():
689 689 c.deleted_files_comments[fname]['comments'].extend(comments)
690 690
691 691 # maybe calculate the range diff
692 692 if c.range_diff_on:
693 693 # TODO(marcink): set whitespace/context
694 694 context_lcl = 3
695 695 ign_whitespace_lcl = False
696 696
697 697 for commit in c.commit_ranges:
698 698 commit2 = commit
699 699 commit1 = commit.first_parent
700 700
701 701 range_diff_cache_file_path = diff_cache_exist(
702 702 cache_path, 'diff', commit.raw_id,
703 703 ign_whitespace_lcl, context_lcl, c.fulldiff)
704 704
705 705 cached_diff = None
706 706 if caching_enabled:
707 707 cached_diff = load_cached_diff(range_diff_cache_file_path)
708 708
709 709 has_proper_diff_cache = cached_diff and cached_diff.get('diff')
710 710 if not force_recache and has_proper_diff_cache:
711 711 diffset = cached_diff['diff']
712 712 else:
713 713 diffset = self._get_range_diffset(
714 714 commits_source_repo, source_repo,
715 715 commit1, commit2, diff_limit, file_limit,
716 716 c.fulldiff, ign_whitespace_lcl, context_lcl
717 717 )
718 718
719 719 # save cached diff
720 720 if caching_enabled:
721 721 cache_diff(range_diff_cache_file_path, diffset, None)
722 722
723 723 c.changes[commit.raw_id] = diffset
724 724
725 725 # this is a hack to properly display links, when creating PR, the
726 726 # compare view and others uses different notation, and
727 727 # compare_commits.mako renders links based on the target_repo.
728 728 # We need to swap that here to generate it properly on the html side
729 729 c.target_repo = c.source_repo
730 730
731 731 c.commit_statuses = ChangesetStatus.STATUSES
732 732
733 733 c.show_version_changes = not pr_closed
734 734 if c.show_version_changes:
735 735 cur_obj = pull_request_at_ver
736 736 prev_obj = prev_pull_request_at_ver
737 737
738 738 old_commit_ids = prev_obj.revisions
739 739 new_commit_ids = cur_obj.revisions
740 740 commit_changes = PullRequestModel()._calculate_commit_id_changes(
741 741 old_commit_ids, new_commit_ids)
742 742 c.commit_changes_summary = commit_changes
743 743
744 744 # calculate the diff for commits between versions
745 745 c.commit_changes = []
746 746
747 747 def mark(cs, fw):
748 748 return list(h.itertools.izip_longest([], cs, fillvalue=fw))
749 749
750 750 for c_type, raw_id in mark(commit_changes.added, 'a') \
751 751 + mark(commit_changes.removed, 'r') \
752 752 + mark(commit_changes.common, 'c'):
753 753
754 754 if raw_id in commit_cache:
755 755 commit = commit_cache[raw_id]
756 756 else:
757 757 try:
758 758 commit = commits_source_repo.get_commit(raw_id)
759 759 except CommitDoesNotExistError:
760 760 # in case we fail extracting still use "dummy" commit
761 761 # for display in commit diff
762 762 commit = h.AttributeDict(
763 763 {'raw_id': raw_id,
764 764 'message': 'EMPTY or MISSING COMMIT'})
765 765 c.commit_changes.append([c_type, commit])
766 766
767 767 # current user review statuses for each version
768 768 c.review_versions = {}
769 769 is_reviewer = PullRequestModel().is_user_reviewer(
770 770 pull_request, self._rhodecode_user)
771 771 if is_reviewer:
772 772 for co in general_comments:
773 773 if co.author.user_id == self._rhodecode_user.user_id:
774 774 status = co.status_change
775 775 if status:
776 776 _ver_pr = status[0].comment.pull_request_version_id
777 777 c.review_versions[_ver_pr] = status[0]
778 778
779 779 return self._get_template_context(c)
780 780
    def get_commits(
            self, commits_source_repo, pull_request_at_ver, source_commit,
            source_ref_id, source_scm, target_commit, target_ref_id, target_scm,
            maybe_unreachable=False):
        """
        Load all commits of the pull request version into an ordered cache
        and resolve the source/target/ancestor commits.

        :param commits_source_repo: scm instance to read commits from
            (the shadow repo when available, otherwise the source repo)
        :param pull_request_at_ver: pull request (at the requested version)
            whose `revisions` list is loaded
        :param source_commit: fallback value returned when the source
            commit cannot be resolved (callers pass an EmptyCommit)
        :param target_commit: fallback value for the target commit, same idea
        :param maybe_unreachable: passed through to commit lookups; used when
            merge problems may have left commits unreachable in the shadow repo
        :returns: tuple of (ancestor_commit, commit_cache,
            missing_requirements, source_commit, target_commit);
            `ancestor_commit` is None when it cannot be resolved, and
            `missing_requirements` is True when the repo raised
            RepositoryRequirementError
        """
        # ordered so commits keep the pull-request revision order
        commit_cache = collections.OrderedDict()
        missing_requirements = False

        try:
            # attributes eagerly loaded for each commit to avoid per-field calls
            pre_load = ["author", "date", "message", "branch", "parents"]

            pull_request_commits = pull_request_at_ver.revisions
            log.debug('Loading %s commits from %s',
                      len(pull_request_commits), commits_source_repo)

            for rev in pull_request_commits:
                comm = commits_source_repo.get_commit(commit_id=rev, pre_load=pre_load,
                                                      maybe_unreachable=maybe_unreachable)
                commit_cache[comm.raw_id] = comm

            # Order here matters, we first need to get target, and then
            # the source
            target_commit = commits_source_repo.get_commit(
                commit_id=safe_str(target_ref_id))

            source_commit = commits_source_repo.get_commit(
                commit_id=safe_str(source_ref_id), maybe_unreachable=True)
        except CommitDoesNotExistError:
            # leave the caller-supplied fallback commits in place
            log.warning('Failed to get commit from `{}` repo'.format(
                commits_source_repo), exc_info=True)
        except RepositoryRequirementError:
            log.warning('Failed to get all required data from repo', exc_info=True)
            missing_requirements = True

        pr_ancestor_id = pull_request_at_ver.common_ancestor_id

        try:
            # ancestor is looked up in the source repo, not the shadow repo
            ancestor_commit = source_scm.get_commit(pr_ancestor_id)
        except Exception:
            ancestor_commit = None

        return ancestor_commit, commit_cache, missing_requirements, source_commit, target_commit
823 823
824 824 def assure_not_empty_repo(self):
825 825 _ = self.request.translate
826 826
827 827 try:
828 828 self.db_repo.scm_instance().get_commit()
829 829 except EmptyRepositoryError:
830 830 h.flash(h.literal(_('There are no commits yet')),
831 831 category='warning')
832 832 raise HTTPFound(
833 833 h.route_path('repo_summary', repo_name=self.db_repo.repo_name))
834 834
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def pull_request_new(self):
        """
        Render the "new pull request" page: compute source repo ref data,
        pick a default target repository (the fork parent, when readable and
        non-empty) and generate a default pull request title for the
        template context.
        """
        _ = self.request.translate
        c = self.load_default_context()

        self.assure_not_empty_repo()
        source_repo = self.db_repo

        # optional pre-selected source ref, passed via GET parameters
        commit_id = self.request.GET.get('commit')
        branch_ref = self.request.GET.get('branch')
        bookmark_ref = self.request.GET.get('bookmark')

        try:
            source_repo_data = PullRequestModel().generate_repo_data(
                source_repo, commit_id=commit_id,
                branch=branch_ref, bookmark=bookmark_ref,
                translator=self.request.translate)
        except CommitDoesNotExistError as e:
            # requested ref doesn't resolve; send the user back to a clean form
            log.exception(e)
            h.flash(_('Commit does not exist'), 'error')
            raise HTTPFound(
                h.route_path('pullrequest_new', repo_name=source_repo.repo_name))

        default_target_repo = source_repo

        # prefer the fork origin as default target when readable and non-empty
        if source_repo.parent and c.has_origin_repo_read_perm:
            parent_vcs_obj = source_repo.parent.scm_instance()
            if parent_vcs_obj and not parent_vcs_obj.is_empty():
                # change default if we have a parent repo
                default_target_repo = source_repo.parent

        target_repo_data = PullRequestModel().generate_repo_data(
            default_target_repo, translator=self.request.translate)

        selected_source_ref = source_repo_data['refs']['selected_ref']
        title_source_ref = ''
        if selected_source_ref:
            # selected_ref looks like `type:name:commit_id`; use the name part
            title_source_ref = selected_source_ref.split(':', 2)[1]
        c.default_title = PullRequestModel().generate_pullrequest_title(
            source=source_repo.repo_name,
            source_ref=title_source_ref,
            target=default_target_repo.repo_name
        )

        # JSON payloads consumed by the ref-selection widgets on the page
        c.default_repo_data = {
            'source_repo_name': source_repo.repo_name,
            'source_refs_json': json.dumps(source_repo_data),
            'target_repo_name': default_target_repo.repo_name,
            'target_refs_json': json.dumps(target_repo_data),
        }
        c.default_source_ref = selected_source_ref

        return self._get_template_context(c)
891 891
892 892 @LoginRequired()
893 893 @NotAnonymous()
894 894 @HasRepoPermissionAnyDecorator(
895 895 'repository.read', 'repository.write', 'repository.admin')
896 896 def pull_request_repo_refs(self):
897 897 self.load_default_context()
898 898 target_repo_name = self.request.matchdict['target_repo_name']
899 899 repo = Repository.get_by_repo_name(target_repo_name)
900 900 if not repo:
901 901 raise HTTPNotFound()
902 902
903 903 target_perm = HasRepoPermissionAny(
904 904 'repository.read', 'repository.write', 'repository.admin')(
905 905 target_repo_name)
906 906 if not target_perm:
907 907 raise HTTPNotFound()
908 908
909 909 return PullRequestModel().generate_repo_data(
910 910 repo, translator=self.request.translate)
911 911
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def pullrequest_repo_targets(self):
        """
        Return a select2-style data structure of candidate target
        repositories for a pull request from the current repo: sibling forks
        of the parent, the repo itself and its own forks, optionally
        filtered by a `query` GET parameter.
        """
        _ = self.request.translate
        filter_query = self.request.GET.get('query')

        # get the parents
        parent_target_repos = []
        if self.db_repo.parent:
            # sibling forks: repos forked from the same parent,
            # shortest names first
            parents_query = Repository.query() \
                .order_by(func.length(Repository.repo_name)) \
                .filter(Repository.fork_id == self.db_repo.parent.repo_id)

            if filter_query:
                ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
                parents_query = parents_query.filter(
                    Repository.repo_name.ilike(ilike_expression))
            parents = parents_query.limit(20).all()

            for parent in parents:
                # empty repositories are skipped, they cannot be PR targets
                parent_vcs_obj = parent.scm_instance()
                if parent_vcs_obj and not parent_vcs_obj.is_empty():
                    parent_target_repos.append(parent)

        # get other forks, and repo itself
        query = Repository.query() \
            .order_by(func.length(Repository.repo_name)) \
            .filter(
                or_(Repository.repo_id == self.db_repo.repo_id,  # repo itself
                    Repository.fork_id == self.db_repo.repo_id)  # forks of this repo
            ) \
            .filter(~Repository.repo_id.in_([x.repo_id for x in parent_target_repos]))

        if filter_query:
            ilike_expression = u'%{}%'.format(safe_unicode(filter_query))
            query = query.filter(Repository.repo_name.ilike(ilike_expression))

        limit = max(20 - len(parent_target_repos), 5)  # not less then 5
        target_repos = query.limit(limit).all()

        all_target_repos = target_repos + parent_target_repos

        repos = []
        # This checks permissions to the repositories
        for obj in ScmModel().get_repos(all_target_repos):
            repos.append({
                'id': obj['name'],
                'text': obj['name'],
                'type': 'repo',
                'repo_id': obj['dbrepo']['repo_id'],
                'repo_type': obj['dbrepo']['repo_type'],
                'private': obj['dbrepo']['private'],

            })

        # shape expected by the select2 widget on the client side
        data = {
            'more': False,
            'results': [{
                'text': _('Repositories'),
                'children': repos
            }] if repos else []
        }
        return data
977 977
978 978 @classmethod
979 979 def get_comment_ids(cls, post_data):
980 980 return filter(lambda e: e > 0, map(safe_int, aslist(post_data.get('comments'), ',')))
981 981
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def pullrequest_comments(self):
        """
        Render the sidebar comments table for a pull request, optionally at
        a specific version (`version` GET parameter). Draft comments are
        excluded. Comment ids already shown client-side are read from the
        POST body so existing entries can be marked in the template.
        """
        self.load_default_context()

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        pull_request_id = pull_request.pull_request_id
        version = self.request.GET.get('version')

        _render = self.request.get_partial_renderer(
            'rhodecode:templates/base/sidebar.mako')
        c = _render.get_call_context()

        # resolve the requested version of the pull request
        (pull_request_latest,
         pull_request_at_ver,
         pull_request_display_obj,
         at_version) = PullRequestModel().get_pr_version(
            pull_request_id, version=version)
        versions = pull_request_display_obj.versions()
        latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
        c.versions = versions + [latest_ver]

        c.at_version = at_version
        # numeric version id, or None when showing the latest version
        c.at_version_num = (at_version
                            if at_version and at_version != PullRequest.LATEST_VER
                            else None)

        self.register_comments_vars(c, pull_request_latest, versions, include_drafts=False)
        all_comments = c.inline_comments_flat + c.comments

        existing_ids = self.get_comment_ids(self.request.POST)
        return _render('comments_table', all_comments, len(all_comments),
                       existing_ids=existing_ids)
1018 1018
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    def pullrequest_todos(self):
        """
        Render the sidebar TODO table for a pull request: unresolved
        followed by resolved TODO comments, excluding drafts. The count
        passed to the template covers only the unresolved entries.
        """
        self.load_default_context()

        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        pull_request_id = pull_request.pull_request_id
        version = self.request.GET.get('version')

        _render = self.request.get_partial_renderer(
            'rhodecode:templates/base/sidebar.mako')
        c = _render.get_call_context()
        # resolve the requested version of the pull request
        (pull_request_latest,
         pull_request_at_ver,
         pull_request_display_obj,
         at_version) = PullRequestModel().get_pr_version(
            pull_request_id, version=version)
        versions = pull_request_display_obj.versions()
        latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
        c.versions = versions + [latest_ver]

        c.at_version = at_version
        # numeric version id, or None when showing the latest version
        c.at_version_num = (at_version
                            if at_version and at_version != PullRequest.LATEST_VER
                            else None)

        c.unresolved_comments = CommentsModel() \
            .get_pull_request_unresolved_todos(pull_request, include_drafts=False)
        c.resolved_comments = CommentsModel() \
            .get_pull_request_resolved_todos(pull_request, include_drafts=False)

        all_comments = c.unresolved_comments + c.resolved_comments
        existing_ids = self.get_comment_ids(self.request.POST)
        return _render('comments_table', all_comments, len(c.unresolved_comments),
                       todo_comments=True, existing_ids=existing_ids)
1057 1057
1058 1058 @LoginRequired()
1059 1059 @NotAnonymous()
1060 1060 @HasRepoPermissionAnyDecorator(
1061 1061 'repository.read', 'repository.write', 'repository.admin')
1062 1062 def pullrequest_drafts(self):
1063 1063 self.load_default_context()
1064 1064
1065 1065 pull_request = PullRequest.get_or_404(
1066 1066 self.request.matchdict['pull_request_id'])
1067 1067 pull_request_id = pull_request.pull_request_id
1068 1068 version = self.request.GET.get('version')
1069 1069
1070 1070 _render = self.request.get_partial_renderer(
1071 1071 'rhodecode:templates/base/sidebar.mako')
1072 1072 c = _render.get_call_context()
1073 1073
1074 1074 (pull_request_latest,
1075 1075 pull_request_at_ver,
1076 1076 pull_request_display_obj,
1077 1077 at_version) = PullRequestModel().get_pr_version(
1078 1078 pull_request_id, version=version)
1079 1079 versions = pull_request_display_obj.versions()
1080 1080 latest_ver = PullRequest.get_pr_display_object(pull_request_latest, pull_request_latest)
1081 1081 c.versions = versions + [latest_ver]
1082 1082
1083 1083 c.at_version = at_version
1084 1084 c.at_version_num = (at_version
1085 1085 if at_version and at_version != PullRequest.LATEST_VER
1086 1086 else None)
1087 1087
1088 1088 c.draft_comments = CommentsModel() \
1089 1089 .get_pull_request_drafts(self._rhodecode_db_user.user_id, pull_request)
1090 1090
1091 1091 all_comments = c.draft_comments
1092 1092
1093 1093 existing_ids = self.get_comment_ids(self.request.POST)
1094 1094 return _render('comments_table', all_comments, len(all_comments),
1095 1095 existing_ids=existing_ids, draft_comments=True)
1096 1096
1097 1097 @LoginRequired()
1098 1098 @NotAnonymous()
1099 1099 @HasRepoPermissionAnyDecorator(
1100 1100 'repository.read', 'repository.write', 'repository.admin')
1101 1101 @CSRFRequired()
1102 1102 def pull_request_create(self):
1103 1103 _ = self.request.translate
1104 1104 self.assure_not_empty_repo()
1105 1105 self.load_default_context()
1106 1106
1107 1107 controls = peppercorn.parse(self.request.POST.items())
1108 1108
1109 1109 try:
1110 1110 form = PullRequestForm(
1111 1111 self.request.translate, self.db_repo.repo_id)()
1112 1112 _form = form.to_python(controls)
1113 1113 except formencode.Invalid as errors:
1114 1114 if errors.error_dict.get('revisions'):
1115 1115 msg = 'Revisions: %s' % errors.error_dict['revisions']
1116 1116 elif errors.error_dict.get('pullrequest_title'):
1117 1117 msg = errors.error_dict.get('pullrequest_title')
1118 1118 else:
1119 1119 msg = _('Error creating pull request: {}').format(errors)
1120 1120 log.exception(msg)
1121 1121 h.flash(msg, 'error')
1122 1122
1123 1123 # would rather just go back to form ...
1124 1124 raise HTTPFound(
1125 1125 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1126 1126
1127 1127 source_repo = _form['source_repo']
1128 1128 source_ref = _form['source_ref']
1129 1129 target_repo = _form['target_repo']
1130 1130 target_ref = _form['target_ref']
1131 1131 commit_ids = _form['revisions'][::-1]
1132 1132 common_ancestor_id = _form['common_ancestor']
1133 1133
1134 1134 # find the ancestor for this pr
1135 1135 source_db_repo = Repository.get_by_repo_name(_form['source_repo'])
1136 1136 target_db_repo = Repository.get_by_repo_name(_form['target_repo'])
1137 1137
1138 1138 if not (source_db_repo or target_db_repo):
1139 1139 h.flash(_('source_repo or target repo not found'), category='error')
1140 1140 raise HTTPFound(
1141 1141 h.route_path('pullrequest_new', repo_name=self.db_repo_name))
1142 1142
1143 1143 # re-check permissions again here
1144 1144 # source_repo we must have read permissions
1145 1145
1146 1146 source_perm = HasRepoPermissionAny(
1147 1147 'repository.read', 'repository.write', 'repository.admin')(
1148 1148 source_db_repo.repo_name)
1149 1149 if not source_perm:
1150 1150 msg = _('Not Enough permissions to source repo `{}`.'.format(
1151 1151 source_db_repo.repo_name))
1152 1152 h.flash(msg, category='error')
1153 1153 # copy the args back to redirect
1154 1154 org_query = self.request.GET.mixed()
1155 1155 raise HTTPFound(
1156 1156 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1157 1157 _query=org_query))
1158 1158
1159 1159 # target repo we must have read permissions, and also later on
1160 1160 # we want to check branch permissions here
1161 1161 target_perm = HasRepoPermissionAny(
1162 1162 'repository.read', 'repository.write', 'repository.admin')(
1163 1163 target_db_repo.repo_name)
1164 1164 if not target_perm:
1165 1165 msg = _('Not Enough permissions to target repo `{}`.'.format(
1166 1166 target_db_repo.repo_name))
1167 1167 h.flash(msg, category='error')
1168 1168 # copy the args back to redirect
1169 1169 org_query = self.request.GET.mixed()
1170 1170 raise HTTPFound(
1171 1171 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1172 1172 _query=org_query))
1173 1173
1174 1174 source_scm = source_db_repo.scm_instance()
1175 1175 target_scm = target_db_repo.scm_instance()
1176 1176
1177 1177 source_ref_obj = unicode_to_reference(source_ref)
1178 1178 target_ref_obj = unicode_to_reference(target_ref)
1179 1179
1180 1180 source_commit = source_scm.get_commit(source_ref_obj.commit_id)
1181 1181 target_commit = target_scm.get_commit(target_ref_obj.commit_id)
1182 1182
1183 1183 ancestor = source_scm.get_common_ancestor(
1184 1184 source_commit.raw_id, target_commit.raw_id, target_scm)
1185 1185
1186 1186 # recalculate target ref based on ancestor
1187 1187 target_ref = ':'.join((target_ref_obj.type, target_ref_obj.name, ancestor))
1188 1188
1189 1189 get_default_reviewers_data, validate_default_reviewers, validate_observers = \
1190 1190 PullRequestModel().get_reviewer_functions()
1191 1191
1192 1192 # recalculate reviewers logic, to make sure we can validate this
1193 1193 reviewer_rules = get_default_reviewers_data(
1194 1194 self._rhodecode_db_user,
1195 1195 source_db_repo,
1196 1196 source_ref_obj,
1197 1197 target_db_repo,
1198 1198 target_ref_obj,
1199 1199 include_diff_info=False)
1200 1200
1201 1201 reviewers = validate_default_reviewers(_form['review_members'], reviewer_rules)
1202 1202 observers = validate_observers(_form['observer_members'], reviewer_rules)
1203 1203
1204 1204 pullrequest_title = _form['pullrequest_title']
1205 1205 title_source_ref = source_ref_obj.name
1206 1206 if not pullrequest_title:
1207 1207 pullrequest_title = PullRequestModel().generate_pullrequest_title(
1208 1208 source=source_repo,
1209 1209 source_ref=title_source_ref,
1210 1210 target=target_repo
1211 1211 )
1212 1212
1213 1213 description = _form['pullrequest_desc']
1214 1214 description_renderer = _form['description_renderer']
1215 1215
1216 1216 try:
1217 1217 pull_request = PullRequestModel().create(
1218 1218 created_by=self._rhodecode_user.user_id,
1219 1219 source_repo=source_repo,
1220 1220 source_ref=source_ref,
1221 1221 target_repo=target_repo,
1222 1222 target_ref=target_ref,
1223 1223 revisions=commit_ids,
1224 1224 common_ancestor_id=common_ancestor_id,
1225 1225 reviewers=reviewers,
1226 1226 observers=observers,
1227 1227 title=pullrequest_title,
1228 1228 description=description,
1229 1229 description_renderer=description_renderer,
1230 1230 reviewer_data=reviewer_rules,
1231 1231 auth_user=self._rhodecode_user
1232 1232 )
1233 1233 Session().commit()
1234 1234
1235 1235 h.flash(_('Successfully opened new pull request'),
1236 1236 category='success')
1237 1237 except Exception:
1238 1238 msg = _('Error occurred during creation of this pull request.')
1239 1239 log.exception(msg)
1240 1240 h.flash(msg, category='error')
1241 1241
1242 1242 # copy the args back to redirect
1243 1243 org_query = self.request.GET.mixed()
1244 1244 raise HTTPFound(
1245 1245 h.route_path('pullrequest_new', repo_name=self.db_repo_name,
1246 1246 _query=org_query))
1247 1247
1248 1248 raise HTTPFound(
1249 1249 h.route_path('pullrequest_show', repo_name=target_repo,
1250 1250 pull_request_id=pull_request.pull_request_id))
1251 1251
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    def pull_request_update(self):
        """
        Dispatch a pull request update depending on the POST payload:
        reviewer/observer membership changes, a commits refresh, or a
        title/description edit. Returns a dict with `response` and an
        optional `redirect_url`; raises 403 when the user may not update
        and 400 for an unrecognized payload.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        c = self.load_default_context()
        redirect_url = None

        # closed pull requests cannot be updated at all
        if pull_request.is_closed():
            log.debug('update: forbidden because pull request is closed')
            msg = _(u'Cannot update closed pull requests.')
            h.flash(msg, category='error')
            return {'response': True,
                    'redirect_url': redirect_url}

        is_state_changing = pull_request.is_state_changing()
        c.pr_broadcast_channel = channelstream.pr_channel(pull_request)

        # only owner or admin can update it
        allowed_to_update = PullRequestModel().check_user_update(
            pull_request, self._rhodecode_user)

        if allowed_to_update:
            controls = peppercorn.parse(self.request.POST.items())
            force_refresh = str2bool(self.request.POST.get('force_refresh'))

            if 'review_members' in controls:
                self._update_reviewers(
                    c,
                    pull_request, controls['review_members'],
                    pull_request.reviewer_data,
                    PullRequestReviewers.ROLE_REVIEWER)
            elif 'observer_members' in controls:
                self._update_reviewers(
                    c,
                    pull_request, controls['observer_members'],
                    pull_request.reviewer_data,
                    PullRequestReviewers.ROLE_OBSERVER)
            elif str2bool(self.request.POST.get('update_commits', 'false')):
                # refuse a commits refresh while another state change is
                # already in progress
                if is_state_changing:
                    log.debug('commits update: forbidden because pull request is in state %s',
                              pull_request.pull_request_state)
                    msg = _(u'Cannot update pull requests commits in state other than `{}`. '
                            u'Current state is: `{}`').format(
                        PullRequest.STATE_CREATED, pull_request.pull_request_state)
                    h.flash(msg, category='error')
                    return {'response': True,
                            'redirect_url': redirect_url}

                self._update_commits(c, pull_request)
                if force_refresh:
                    # instruct the client to fully reload the PR page
                    redirect_url = h.route_path(
                        'pullrequest_show', repo_name=self.db_repo_name,
                        pull_request_id=pull_request.pull_request_id,
                        _query={"force_refresh": 1})
            elif str2bool(self.request.POST.get('edit_pull_request', 'false')):
                self._edit_pull_request(pull_request)
            else:
                log.error('Unhandled update data.')
                raise HTTPBadRequest()

            return {'response': True,
                    'redirect_url': redirect_url}
        raise HTTPForbidden()
1321 1321
1322 1322 def _edit_pull_request(self, pull_request):
1323 1323 """
1324 1324 Edit title and description
1325 1325 """
1326 1326 _ = self.request.translate
1327 1327
1328 1328 try:
1329 1329 PullRequestModel().edit(
1330 1330 pull_request,
1331 1331 self.request.POST.get('title'),
1332 1332 self.request.POST.get('description'),
1333 1333 self.request.POST.get('description_renderer'),
1334 1334 self._rhodecode_user)
1335 1335 except ValueError:
1336 1336 msg = _(u'Cannot update closed pull requests.')
1337 1337 h.flash(msg, category='error')
1338 1338 return
1339 1339 else:
1340 1340 Session().commit()
1341 1341
1342 1342 msg = _(u'Pull request title & description updated.')
1343 1343 h.flash(msg, category='success')
1344 1344 return
1345 1345
    def _update_commits(self, c, pull_request):
        """
        Pull fresh commits from the source (and possibly target) repository
        into the pull request, flash a summary of what changed, and push a
        live update over channelstream.

        :param c: template context; carries ``pr_broadcast_channel``
        :param pull_request: the PullRequest instance to update
        """
        _ = self.request.translate

        # guard the operation with the UPDATING state so concurrent requests
        # see the pull request as busy
        with pull_request.set_state(PullRequest.STATE_UPDATING):
            resp = PullRequestModel().update_commits(
                pull_request, self._rhodecode_db_user)

        if resp.executed:

            # describe which side(s) of the pull request actually changed
            if resp.target_changed and resp.source_changed:
                changed = 'target and source repositories'
            elif resp.target_changed and not resp.source_changed:
                changed = 'target repository'
            elif not resp.target_changed and resp.source_changed:
                changed = 'source repository'
            else:
                changed = 'nothing'

            msg = _(u'Pull request updated to "{source_commit_id}" with '
                    u'{count_added} added, {count_removed} removed commits. '
                    u'Source of changes: {change_source}.')
            msg = msg.format(
                source_commit_id=pull_request.source_ref_parts.commit_id,
                count_added=len(resp.changes.added),
                count_removed=len(resp.changes.removed),
                change_source=changed)
            h.flash(msg, category='success')
            # notify everyone currently watching this pull request page
            channelstream.pr_update_channelstream_push(
                self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
        else:
            msg = PullRequestModel.UPDATE_STATUS_MESSAGES[resp.reason]
            # some failure reasons are expected outcomes (nothing to update,
            # unsupported ref type) and warrant a warning rather than an error
            warning_reasons = [
                UpdateFailureReason.NO_CHANGE,
                UpdateFailureReason.WRONG_REF_TYPE,
            ]
            category = 'warning' if resp.reason in warning_reasons else 'error'
            h.flash(msg, category=category)
1383 1383
    def _update_reviewers(self, c, pull_request, review_members, reviewer_rules, role):
        """
        Replace the reviewer or observer set of a pull request.

        Validates the submitted members against the default-reviewer rules,
        persists the change and pushes a channelstream notification. Only the
        reviewer role can affect the calculated review status, so only that
        branch triggers the ``review_status_change`` hook.

        :param role: PullRequestReviewers.ROLE_REVIEWER or ROLE_OBSERVER
        """
        _ = self.request.translate

        get_default_reviewers_data, validate_default_reviewers, validate_observers = \
            PullRequestModel().get_reviewer_functions()

        if role == PullRequestReviewers.ROLE_REVIEWER:
            try:
                reviewers = validate_default_reviewers(review_members, reviewer_rules)
            except ValueError as e:
                log.error('Reviewers Validation: {}'.format(e))
                h.flash(e, category='error')
                return

            # capture the status before the change so we can detect a transition
            old_calculated_status = pull_request.calculated_review_status()
            PullRequestModel().update_reviewers(
                pull_request, reviewers, self._rhodecode_db_user)

            Session().commit()

            msg = _('Pull request reviewers updated.')
            h.flash(msg, category='success')
            channelstream.pr_update_channelstream_push(
                self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)

            # trigger status changed if change in reviewers changes the status
            calculated_status = pull_request.calculated_review_status()
            if old_calculated_status != calculated_status:
                PullRequestModel().trigger_pull_request_hook(
                    pull_request, self._rhodecode_user, 'review_status_change',
                    data={'status': calculated_status})

        elif role == PullRequestReviewers.ROLE_OBSERVER:
            try:
                observers = validate_observers(review_members, reviewer_rules)
            except ValueError as e:
                log.error('Observers Validation: {}'.format(e))
                h.flash(e, category='error')
                return

            # observers do not influence the review status, hence no
            # status recalculation or hook trigger in this branch
            PullRequestModel().update_observers(
                pull_request, observers, self._rhodecode_db_user)

            Session().commit()
            msg = _('Pull request observers updated.')
            h.flash(msg, category='success')
            channelstream.pr_update_channelstream_push(
                self.request, c.pr_broadcast_channel, self._rhodecode_user, msg)
1432 1432
    @LoginRequired()
    @NotAnonymous()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @CSRFRequired()
    def pull_request_merge(self):
        """
        Merge will perform a server-side merge of the specified
        pull request, if the pull request is approved and mergeable.
        After successful merging, the pull request is automatically
        closed, with a relevant comment.
        """
        pull_request = PullRequest.get_or_404(
            self.request.matchdict['pull_request_id'])
        _ = self.request.translate

        # refuse to merge while another state-changing operation is running
        if pull_request.is_state_changing():
            log.debug('show: forbidden because pull request is in state %s',
                      pull_request.pull_request_state)
            msg = _(u'Cannot merge pull requests in state other than `{}`. '
                    u'Current state is: `{}`').format(PullRequest.STATE_CREATED,
                                                      pull_request.pull_request_state)
            h.flash(msg, category='error')
            raise HTTPFound(
                h.route_path('pullrequest_show',
                             repo_name=pull_request.target_repo.repo_name,
                             pull_request_id=pull_request.pull_request_id))

        self.load_default_context()

        # run the merge pre-conditions (approval, mergeability) while the
        # pull request is marked as UPDATING
        with pull_request.set_state(PullRequest.STATE_UPDATING):
            check = MergeCheck.validate(
                pull_request, auth_user=self._rhodecode_user,
                translator=self.request.translate)
            merge_possible = not check.failed

        # surface every failed pre-condition to the user
        for err_type, error_msg in check.errors:
            h.flash(error_msg, category=err_type)

        if merge_possible:
            log.debug("Pre-conditions checked, trying to merge.")
            # build the vcs extras as if this was a regular push operation
            extras = vcs_operation_context(
                self.request.environ, repo_name=pull_request.target_repo.repo_name,
                username=self._rhodecode_db_user.username, action='push',
                scm=pull_request.target_repo.repo_type)
            with pull_request.set_state(PullRequest.STATE_UPDATING):
                self._merge_pull_request(
                    pull_request, self._rhodecode_db_user, extras)
        else:
            log.debug("Pre-conditions failed, NOT merging.")

        # always redirect back to the pull request page, success or not
        raise HTTPFound(
            h.route_path('pullrequest_show',
                         repo_name=pull_request.target_repo.repo_name,
                         pull_request_id=pull_request.pull_request_id))
1488 1488
1489 1489 def _merge_pull_request(self, pull_request, user, extras):
1490 1490 _ = self.request.translate
1491 1491 merge_resp = PullRequestModel().merge_repo(pull_request, user, extras=extras)
1492 1492
1493 1493 if merge_resp.executed:
1494 1494 log.debug("The merge was successful, closing the pull request.")
1495 1495 PullRequestModel().close_pull_request(
1496 1496 pull_request.pull_request_id, user)
1497 1497 Session().commit()
1498 1498 msg = _('Pull request was successfully merged and closed.')
1499 1499 h.flash(msg, category='success')
1500 1500 else:
1501 1501 log.debug(
1502 1502 "The merge was not successful. Merge response: %s", merge_resp)
1503 1503 msg = merge_resp.merge_status_message
1504 1504 h.flash(msg, category='error')
1505 1505
1506 1506 @LoginRequired()
1507 1507 @NotAnonymous()
1508 1508 @HasRepoPermissionAnyDecorator(
1509 1509 'repository.read', 'repository.write', 'repository.admin')
1510 1510 @CSRFRequired()
1511 1511 def pull_request_delete(self):
1512 1512 _ = self.request.translate
1513 1513
1514 1514 pull_request = PullRequest.get_or_404(
1515 1515 self.request.matchdict['pull_request_id'])
1516 1516 self.load_default_context()
1517 1517
1518 1518 pr_closed = pull_request.is_closed()
1519 1519 allowed_to_delete = PullRequestModel().check_user_delete(
1520 1520 pull_request, self._rhodecode_user) and not pr_closed
1521 1521
1522 1522 # only owner can delete it !
1523 1523 if allowed_to_delete:
1524 1524 PullRequestModel().delete(pull_request, self._rhodecode_user)
1525 1525 Session().commit()
1526 1526 h.flash(_('Successfully deleted pull request'),
1527 1527 category='success')
1528 1528 raise HTTPFound(h.route_path('pullrequest_show_all',
1529 1529 repo_name=self.db_repo_name))
1530 1530
1531 1531 log.warning('user %s tried to delete pull request without access',
1532 1532 self._rhodecode_user)
1533 1533 raise HTTPNotFound()
1534 1534
    def _pull_request_comments_create(self, pull_request, comments):
        """
        Create one or more comments on a pull request.

        Each entry in ``comments`` is a dict with keys ``comment_type``,
        ``text``, ``status``, ``is_draft``, ``resolves_comment_id``,
        ``close_pull_request``, ``f_path`` and ``line``. Close-requests are
        routed through ``close_pull_request_with_comment``; everything else
        goes through the regular comment path, optionally setting a review
        status. Returns a dict keyed by comment_id with rendered HTML and
        comment metadata for the UI.
        """
        _ = self.request.translate
        data = {}
        if not comments:
            return
        pull_request_id = pull_request.pull_request_id

        # True only when every submitted comment is a draft; used below to
        # suppress the channelstream broadcast entirely
        all_drafts = len([x for x in comments if str2bool(x['is_draft'])]) == len(comments)

        for entry in comments:
            c = self.load_default_context()
            comment_type = entry['comment_type']
            text = entry['text']
            status = entry['status']
            is_draft = str2bool(entry['is_draft'])
            resolves_comment_id = entry['resolves_comment_id']
            close_pull_request = entry['close_pull_request']
            f_path = entry['f_path']
            line_no = entry['line']
            # DOM element id the UI scrolls/attaches to for this comment
            target_elem_id = 'file-{}'.format(h.safeid(h.safe_unicode(f_path)))

            # the logic here should work like following, if we submit close
            # pr comment, use `close_pull_request_with_comment` function
            # else handle regular comment logic

            if close_pull_request:
                # only owner or admin or person with write permissions
                allowed_to_close = PullRequestModel().check_user_update(
                    pull_request, self._rhodecode_user)
                if not allowed_to_close:
                    log.debug('comment: forbidden because not allowed to close '
                              'pull request %s', pull_request_id)
                    raise HTTPForbidden()

                # This also triggers `review_status_change`
                comment, status = PullRequestModel().close_pull_request_with_comment(
                    pull_request, self._rhodecode_user, self.db_repo, message=text,
                    auth_user=self._rhodecode_user)
                Session().flush()
                is_inline = comment.is_inline

                PullRequestModel().trigger_pull_request_hook(
                    pull_request, self._rhodecode_user, 'comment',
                    data={'comment': comment})

            else:
                # regular comment case, could be inline, or one with status.
                # for that one we check also permissions
                # Additionally ENSURE if somehow draft is sent we're then unable to change status
                allowed_to_change_status = PullRequestModel().check_user_change_status(
                    pull_request, self._rhodecode_user) and not is_draft

                if status and allowed_to_change_status:
                    # fall back to an auto-generated status-change message
                    # when no comment text was supplied
                    message = (_('Status change %(transition_icon)s %(status)s')
                               % {'transition_icon': '>',
                                  'status': ChangesetStatus.get_status_lbl(status)})
                    text = text or message

                comment = CommentsModel().create(
                    text=text,
                    repo=self.db_repo.repo_id,
                    user=self._rhodecode_user.user_id,
                    pull_request=pull_request,
                    f_path=f_path,
                    line_no=line_no,
                    status_change=(ChangesetStatus.get_status_lbl(status)
                                   if status and allowed_to_change_status else None),
                    status_change_type=(status
                                        if status and allowed_to_change_status else None),
                    comment_type=comment_type,
                    is_draft=is_draft,
                    resolves_comment_id=resolves_comment_id,
                    auth_user=self._rhodecode_user,
                    send_email=not is_draft,  # skip notification for draft comments
                )
                is_inline = comment.is_inline

                if allowed_to_change_status:
                    # calculate old status before we change it
                    old_calculated_status = pull_request.calculated_review_status()

                    # get status if set !
                    if status:
                        ChangesetStatusModel().set_status(
                            self.db_repo.repo_id,
                            status,
                            self._rhodecode_user.user_id,
                            comment,
                            pull_request=pull_request
                        )

                    Session().flush()
                    # this is somehow required to get access to some relationship
                    # loaded on comment
                    Session().refresh(comment)

                    # skip notifications for drafts
                    if not is_draft:
                        PullRequestModel().trigger_pull_request_hook(
                            pull_request, self._rhodecode_user, 'comment',
                            data={'comment': comment})

                    # we now calculate the status of pull request, and based on that
                    # calculation we set the commits status
                    calculated_status = pull_request.calculated_review_status()
                    if old_calculated_status != calculated_status:
                        PullRequestModel().trigger_pull_request_hook(
                            pull_request, self._rhodecode_user, 'review_status_change',
                            data={'status': calculated_status})

            comment_id = comment.comment_id
            data[comment_id] = {
                'target_id': target_elem_id
            }
            Session().flush()

            # render the comment block server-side so the UI can insert it
            # without a page reload
            c.co = comment
            c.at_version_num = None
            c.is_new = True
            rendered_comment = render(
                'rhodecode:templates/changeset/changeset_comment_block.mako',
                self._get_template_context(c), self.request)

            data[comment_id].update(comment.get_dict())
            data[comment_id].update({'rendered_text': rendered_comment})

        Session().commit()

        # skip channelstream for draft comments
        if not all_drafts:
            comment_broadcast_channel = channelstream.comment_channel(
                self.db_repo_name, pull_request_obj=pull_request)

            comment_data = data
            # NOTE(review): is_inline reflects only the LAST comment created in
            # the loop above; presumably batched comments share one placement
            posted_comment_type = 'inline' if is_inline else 'general'
            if len(data) == 1:
                msg = _('posted {} new {} comment').format(len(data), posted_comment_type)
            else:
                msg = _('posted {} new {} comments').format(len(data), posted_comment_type)

            channelstream.comment_channelstream_push(
                self.request, comment_broadcast_channel, self._rhodecode_user, msg,
                comment_data=comment_data)

        return data
1680 1680
1681 1681 @LoginRequired()
1682 1682 @NotAnonymous()
1683 1683 @HasRepoPermissionAnyDecorator(
1684 1684 'repository.read', 'repository.write', 'repository.admin')
1685 1685 @CSRFRequired()
1686 1686 def pull_request_comment_create(self):
1687 1687 _ = self.request.translate
1688 1688
1689 1689 pull_request = PullRequest.get_or_404(self.request.matchdict['pull_request_id'])
1690 1690
1691 1691 if pull_request.is_closed():
1692 1692 log.debug('comment: forbidden because pull request is closed')
1693 1693 raise HTTPForbidden()
1694 1694
1695 1695 allowed_to_comment = PullRequestModel().check_user_comment(
1696 1696 pull_request, self._rhodecode_user)
1697 1697 if not allowed_to_comment:
1698 1698 log.debug('comment: forbidden because pull request is from forbidden repo')
1699 1699 raise HTTPForbidden()
1700 1700
1701 1701 comment_data = {
1702 1702 'comment_type': self.request.POST.get('comment_type'),
1703 1703 'text': self.request.POST.get('text'),
1704 1704 'status': self.request.POST.get('changeset_status', None),
1705 1705 'is_draft': self.request.POST.get('draft'),
1706 1706 'resolves_comment_id': self.request.POST.get('resolves_comment_id', None),
1707 1707 'close_pull_request': self.request.POST.get('close_pull_request'),
1708 1708 'f_path': self.request.POST.get('f_path'),
1709 1709 'line': self.request.POST.get('line'),
1710 1710 }
1711 1711 data = self._pull_request_comments_create(pull_request, [comment_data])
1712 1712
1713 1713 return data
1714 1714
1715 1715 @LoginRequired()
1716 1716 @NotAnonymous()
1717 1717 @HasRepoPermissionAnyDecorator(
1718 1718 'repository.read', 'repository.write', 'repository.admin')
1719 1719 @CSRFRequired()
1720 1720 def pull_request_comment_delete(self):
1721 1721 pull_request = PullRequest.get_or_404(
1722 1722 self.request.matchdict['pull_request_id'])
1723 1723
1724 1724 comment = ChangesetComment.get_or_404(
1725 1725 self.request.matchdict['comment_id'])
1726 1726 comment_id = comment.comment_id
1727 1727
1728 1728 if comment.immutable:
1729 1729 # don't allow deleting comments that are immutable
1730 1730 raise HTTPForbidden()
1731 1731
1732 1732 if pull_request.is_closed():
1733 1733 log.debug('comment: forbidden because pull request is closed')
1734 1734 raise HTTPForbidden()
1735 1735
1736 1736 if not comment:
1737 1737 log.debug('Comment with id:%s not found, skipping', comment_id)
1738 1738 # comment already deleted in another call probably
1739 1739 return True
1740 1740
1741 1741 if comment.pull_request.is_closed():
1742 1742 # don't allow deleting comments on closed pull request
1743 1743 raise HTTPForbidden()
1744 1744
1745 1745 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1746 1746 super_admin = h.HasPermissionAny('hg.admin')()
1747 1747 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1748 1748 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1749 1749 comment_repo_admin = is_repo_admin and is_repo_comment
1750 1750
1751 if comment.draft and not comment_owner:
1752 # We never allow to delete draft comments for other than owners
1753 raise HTTPNotFound()
1754
1751 1755 if super_admin or comment_owner or comment_repo_admin:
1752 1756 old_calculated_status = comment.pull_request.calculated_review_status()
1753 1757 CommentsModel().delete(comment=comment, auth_user=self._rhodecode_user)
1754 1758 Session().commit()
1755 1759 calculated_status = comment.pull_request.calculated_review_status()
1756 1760 if old_calculated_status != calculated_status:
1757 1761 PullRequestModel().trigger_pull_request_hook(
1758 1762 comment.pull_request, self._rhodecode_user, 'review_status_change',
1759 1763 data={'status': calculated_status})
1760 1764 return True
1761 1765 else:
1762 1766 log.warning('No permissions for user %s to delete comment_id: %s',
1763 1767 self._rhodecode_db_user, comment_id)
1764 1768 raise HTTPNotFound()
1765 1769
1766 1770 @LoginRequired()
1767 1771 @NotAnonymous()
1768 1772 @HasRepoPermissionAnyDecorator(
1769 1773 'repository.read', 'repository.write', 'repository.admin')
1770 1774 @CSRFRequired()
1771 1775 def pull_request_comment_edit(self):
1772 1776 self.load_default_context()
1773 1777
1774 1778 pull_request = PullRequest.get_or_404(
1775 1779 self.request.matchdict['pull_request_id']
1776 1780 )
1777 1781 comment = ChangesetComment.get_or_404(
1778 1782 self.request.matchdict['comment_id']
1779 1783 )
1780 1784 comment_id = comment.comment_id
1781 1785
1782 1786 if comment.immutable:
1783 1787 # don't allow deleting comments that are immutable
1784 1788 raise HTTPForbidden()
1785 1789
1786 1790 if pull_request.is_closed():
1787 1791 log.debug('comment: forbidden because pull request is closed')
1788 1792 raise HTTPForbidden()
1789 1793
1790 1794 if comment.pull_request.is_closed():
1791 1795 # don't allow deleting comments on closed pull request
1792 1796 raise HTTPForbidden()
1793 1797
1794 1798 is_repo_admin = h.HasRepoPermissionAny('repository.admin')(self.db_repo_name)
1795 1799 super_admin = h.HasPermissionAny('hg.admin')()
1796 1800 comment_owner = comment.author.user_id == self._rhodecode_user.user_id
1797 1801 is_repo_comment = comment.repo.repo_name == self.db_repo_name
1798 1802 comment_repo_admin = is_repo_admin and is_repo_comment
1799 1803
1800 1804 if super_admin or comment_owner or comment_repo_admin:
1801 1805 text = self.request.POST.get('text')
1802 1806 version = self.request.POST.get('version')
1803 1807 if text == comment.text:
1804 1808 log.warning(
1805 1809 'Comment(PR): '
1806 1810 'Trying to create new version '
1807 1811 'with the same comment body {}'.format(
1808 1812 comment_id,
1809 1813 )
1810 1814 )
1811 1815 raise HTTPNotFound()
1812 1816
1813 1817 if version.isdigit():
1814 1818 version = int(version)
1815 1819 else:
1816 1820 log.warning(
1817 1821 'Comment(PR): Wrong version type {} {} '
1818 1822 'for comment {}'.format(
1819 1823 version,
1820 1824 type(version),
1821 1825 comment_id,
1822 1826 )
1823 1827 )
1824 1828 raise HTTPNotFound()
1825 1829
1826 1830 try:
1827 1831 comment_history = CommentsModel().edit(
1828 1832 comment_id=comment_id,
1829 1833 text=text,
1830 1834 auth_user=self._rhodecode_user,
1831 1835 version=version,
1832 1836 )
1833 1837 except CommentVersionMismatch:
1834 1838 raise HTTPConflict()
1835 1839
1836 1840 if not comment_history:
1837 1841 raise HTTPNotFound()
1838 1842
1839 1843 Session().commit()
1840 1844 if not comment.draft:
1841 1845 PullRequestModel().trigger_pull_request_hook(
1842 1846 pull_request, self._rhodecode_user, 'comment_edit',
1843 1847 data={'comment': comment})
1844 1848
1845 1849 return {
1846 1850 'comment_history_id': comment_history.comment_history_id,
1847 1851 'comment_id': comment.comment_id,
1848 1852 'comment_version': comment_history.version,
1849 1853 'comment_author_username': comment_history.author.username,
1850 1854 'comment_author_gravatar': h.gravatar_url(comment_history.author.email, 16),
1851 1855 'comment_created_on': h.age_component(comment_history.created_on,
1852 1856 time_is_local=True),
1853 1857 }
1854 1858 else:
1855 1859 log.warning('No permissions for user %s to edit comment_id: %s',
1856 1860 self._rhodecode_db_user, comment_id)
1857 1861 raise HTTPNotFound()
@@ -1,220 +1,228 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import logging
24 24 import datetime
25 25 from pyramid.compat import configparser
26 26
27 27 from rhodecode.model.db import Session, User, UserSshKeys
28 28 from rhodecode.model.scm import ScmModel
29 29
30 30 from .hg import MercurialServer
31 31 from .git import GitServer
32 32 from .svn import SubversionServer
33 33 log = logging.getLogger(__name__)
34 34
35 35
class SshWrapper(object):
    """
    Dispatches an incoming SSH command (as configured in authorized_keys)
    to the matching VCS server implementation: Mercurial, Git or Subversion.
    """

    def __init__(self, command, connection_info, mode,
                 user, user_id, key_id, shell, ini_path, env):
        # raw SSH command line, e.g. ``git-upload-pack '/repo'``
        self.command = command
        # SSH_CONNECTION-style string; parsed by get_connection_info()
        self.connection_info = connection_info
        self.mode = mode
        self.user = user
        self.user_id = user_id
        self.key_id = key_id
        # whether dropping to an interactive shell is allowed for this key
        self.shell = shell
        self.ini_path = ini_path
        self.env = env

        self.config = self.parse_config(ini_path)
        self.server_impl = None

    def parse_config(self, config_path):
        # Read the RhodeCode .ini file; values are consumed by the VCS servers.
        parser = configparser.ConfigParser()
        parser.read(config_path)
        return parser

    def update_key_access_time(self, key_id):
        # Record the last-used timestamp of the SSH key, if it still exists.
        key = UserSshKeys().query().filter(
            UserSshKeys.ssh_key_id == key_id).scalar()
        if key:
            key.accessed_on = datetime.datetime.utcnow()
            Session().add(key)
            Session().commit()
            log.debug('Update key id:`%s` fingerprint:`%s` access time',
                      key_id, key.ssh_key_fingerprint)

    def get_connection_info(self):
        """
        connection_info

        Identifies the client and server ends of the connection.
        The variable contains four space-separated values: client IP address,
        client port number, server IP address, and server port number.
        """
        conn = dict(
            client_ip=None,
            client_port=None,
            server_ip=None,
            server_port=None,
        )

        info = self.connection_info.split(' ')
        if len(info) == 4:
            conn['client_ip'] = info[0]
            conn['client_port'] = info[1]
            conn['server_ip'] = info[2]
            conn['server_port'] = info[3]

        return conn

    def maybe_translate_repo_uid(self, repo_name):
        # Repositories can be addressed by a `_<id>` UID; translate that to
        # the real repository name. Unknown UIDs pass through unchanged.
        if repo_name.startswith('_'):
            from rhodecode.model.repo import RepoModel
            by_id_match = RepoModel().get_repo_by_id(repo_name)
            if by_id_match:
                repo_name = by_id_match.repo_name
        return repo_name

    def get_repo_details(self, mode):
        """
        Detect VCS type, repository name and operation mode from the raw SSH
        command. Returns (vcs_type, repo_name, mode); for svn the repo name
        stays None because it can only be read from the protocol stream later.
        """
        vcs_type = mode if mode in ['svn', 'hg', 'git'] else None
        repo_name = None

        hg_pattern = r'^hg\s+\-R\s+(\S+)\s+serve\s+\-\-stdio$'
        hg_match = re.match(hg_pattern, self.command)
        if hg_match is not None:
            vcs_type = 'hg'
            repo_name = self.maybe_translate_repo_uid(hg_match.group(1).strip('/'))
            return vcs_type, repo_name, mode

        git_pattern = r'^git-(receive-pack|upload-pack)\s\'[/]?(\S+?)(|\.git)\'$'
        git_match = re.match(git_pattern, self.command)
        if git_match is not None:
            vcs_type = 'git'
            repo_name = self.maybe_translate_repo_uid(git_match.group(2).strip('/'))
            # receive-pack == push, upload-pack == pull
            mode = git_match.group(1)
            return vcs_type, repo_name, mode

        svn_pattern = r'^svnserve -t'
        svn_match = re.match(svn_pattern, self.command)

        if svn_match is not None:
            vcs_type = 'svn'
            # Repo name should be extracted from the input stream, we're unable to
            # extract it at this point in execution
            return vcs_type, repo_name, mode

        return vcs_type, repo_name, mode

    def serve(self, vcs, repo, mode, user, permissions, branch_permissions):
        """
        Instantiate the VCS-specific server and run the tunneled operation.

        :raises Exception: for unrecognised VCS types
        """
        store = ScmModel().repos_path

        check_branch_perms = False
        detect_force_push = False

        if branch_permissions:
            check_branch_perms = True
            detect_force_push = True

        log.debug(
            'VCS detected:`%s` mode: `%s` repo_name: %s, branch_permission_checks:%s',
            vcs, mode, repo, check_branch_perms)

        # detect if we have to check branch permissions
        extras = {
            'detect_force_push': detect_force_push,
            'check_branch_perms': check_branch_perms,
        }

        if vcs == 'hg':
            server = MercurialServer(
                store=store, ini_path=self.ini_path,
                repo_name=repo, user=user,
                user_permissions=permissions, config=self.config, env=self.env)
            self.server_impl = server
            return server.run(tunnel_extras=extras)

        elif vcs == 'git':
            server = GitServer(
                store=store, ini_path=self.ini_path,
                repo_name=repo, repo_mode=mode, user=user,
                user_permissions=permissions, config=self.config, env=self.env)
            self.server_impl = server
            return server.run(tunnel_extras=extras)

        elif vcs == 'svn':
            # repo_name is intentionally None; svn resolves it from the stream
            server = SubversionServer(
                store=store, ini_path=self.ini_path,
                repo_name=None, user=user,
                user_permissions=permissions, config=self.config, env=self.env)
            self.server_impl = server
            return server.run(tunnel_extras=extras)

        else:
            raise Exception('Unrecognised VCS: {}'.format(vcs))

    def wrap(self):
        """
        Main entry point: detect the requested VCS operation from the SSH
        command, record key usage, then either drop to a shell, serve the
        VCS protocol, or abort. Returns the process exit code.
        """
        mode = self.mode
        user = self.user
        user_id = self.user_id
        key_id = self.key_id
        shell = self.shell

        scm_detected, scm_repo, scm_mode = self.get_repo_details(mode)

        log.debug(
            'Mode: `%s` User: `%s:%s` Shell: `%s` SSH Command: `\"%s\"` '
            'SCM_DETECTED: `%s` SCM Mode: `%s` SCM Repo: `%s`',
            mode, user, user_id, shell, self.command,
            scm_detected, scm_mode, scm_repo)

        # update last access time for this key
        self.update_key_access_time(key_id)

        log.debug('SSH Connection info %s', self.get_connection_info())

        if shell and self.command is None:
            log.info('Dropping to shell, no command given and shell is allowed')
            # NOTE(review): execl replaces this process on success, so the
            # assignment below only runs if exec fails to start
            os.execl('/bin/bash', '-l')
            exit_code = 1

        elif scm_detected:
            user = User.get(user_id)
            if not user:
                log.warning('User with id %s not found', user_id)
                exit_code = -1
                return exit_code

            auth_user = user.AuthUser()
            permissions = auth_user.permissions['repositories']
            repo_branch_permissions = auth_user.get_branch_permissions(scm_repo)
            try:
                exit_code, is_updated = self.serve(
                    scm_detected, scm_repo, scm_mode, user, permissions,
                    repo_branch_permissions)
            except Exception:
                log.exception('Error occurred during execution of SshWrapper')
                exit_code = -1

        elif self.command is None and shell is False:
            log.error('No Command given.')
            exit_code = -1

        else:
            log.error('Unhandled Command: "%s" Aborting.', self.command)
            exit_code = -1

        return exit_code
@@ -1,152 +1,156 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 import os
23
22 24 import mock
23 25 import pytest
24 26
25 27 from rhodecode.apps.ssh_support.lib.backends.git import GitServer
26 28 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
27 29
28 30
class GitServerCreator(object):
    """
    Factory producing ready-to-use ``GitServer`` instances for the tests,
    backed by a mocked config object and dummy user/env.
    """
    root = '/tmp/repo/path/'
    git_path = '/usr/local/bin/git'
    config_data = {
        'app:main': {
            'ssh.executable.git': git_path,
            'vcs.hooks.protocol': 'http',
        }
    }
    repo_name = 'test_git'
    repo_mode = 'receive-pack'
    user = plain_dummy_user()

    def __init__(self):
        # emulate config.get(section, key) backed by config_data above
        def config_get(part, key):
            return self.config_data.get(part, {}).get(key)
        self.config_mock = mock.Mock()
        self.config_mock.get = mock.Mock(side_effect=config_get)

    def create(self, **kwargs):
        """Build a GitServer; ``kwargs`` override any constructor argument."""
        defaults = {
            'store': self.root,
            'ini_path': '',
            'user': self.user,
            'repo_name': self.repo_name,
            'repo_mode': self.repo_mode,
            'user_permissions': {
                self.repo_name: 'repository.admin'
            },
            'config': self.config_mock,
            'env': plain_dummy_env()
        }
        defaults.update(kwargs)
        return GitServer(**defaults)
64 66
65 67
@pytest.fixture()
def git_server(app):
    # `app` fixture guarantees the application context is initialized
    return GitServerCreator()
69 71
70 72
class TestGitServer(object):
    """Tests for the SSH GitServer backend."""

    def test_command(self, git_server):
        # tunnel command should cd into the store and spawn git on the repo
        server = git_server.create()
        expected_command = (
            'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format(
                root=git_server.root, git_path=git_server.git_path,
                repo_mode=git_server.repo_mode, repo_name=git_server.repo_name)
        )
        assert expected_command == server.tunnel.command()

    @pytest.mark.parametrize('permissions, action, code', [
        ({}, 'pull', -2),
        ({'test_git': 'repository.read'}, 'pull', 0),
        ({'test_git': 'repository.read'}, 'push', -2),
        ({'test_git': 'repository.write'}, 'push', 0),
        ({'test_git': 'repository.admin'}, 'push', 0),

    ])
    def test_permission_checks(self, git_server, permissions, action, code):
        # 0 == allowed, -2 == permission denied
        server = git_server.create(user_permissions=permissions)
        result = server._check_permissions(action)
        assert result is code

    @pytest.mark.parametrize('permissions, value', [
        ({}, False),
        ({'test_git': 'repository.read'}, False),
        ({'test_git': 'repository.write'}, True),
        ({'test_git': 'repository.admin'}, True),

    ])
    def test_has_write_permissions(self, git_server, permissions, value):
        server = git_server.create(user_permissions=permissions)
        result = server.has_write_perm()
        assert result is value

    def test_run_returns_executes_command(self, git_server):
        server = git_server.create()
        from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper

        # NOTE(review): mutates os.environ without cleanup; may leak into
        # other tests — consider monkeypatch.setenv instead
        os.environ['SSH_CLIENT'] = '127.0.0.1'
        with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
            _patch.return_value = 0
            with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
                exit_code = server.run()

        assert exit_code == (0, False)

    @pytest.mark.parametrize(
        'repo_mode, action', [
            ['receive-pack', 'push'],
            ['upload-pack', 'pull']
        ])
    def test_update_environment(self, git_server, repo_mode, action):
        # verifies the JSON hook payload handed to os.putenv
        server = git_server.create(repo_mode=repo_mode)
        store = server.store

        with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}):
            with mock.patch('os.putenv') as putenv_mock:
                server.update_environment(action)

        expected_data = {
            'username': git_server.user.username,
            'user_id': git_server.user.user_id,
            'scm': 'git',
            'repository': git_server.repo_name,
            'make_lock': None,
            'action': action,
            'ip': '10.10.10.10',
            'locked_by': [None, None],
            'config': '',
            'repo_store': store,
            'server_url': None,
            'hooks': ['push', 'pull'],
            'is_shadow_repo': False,
            'hooks_module': 'rhodecode.lib.hooks_daemon',
            'check_branch_perms': False,
            'detect_force_push': False,
            'user_agent': u'ssh-user-agent',
            'SSH': True,
            'SSH_PERMISSIONS': 'repository.admin',
        }
        args, kwargs = putenv_mock.call_args
        assert json.loads(args[1]) == expected_data
@@ -1,119 +1,120 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import mock
23 23 import pytest
24 24
25 25 from rhodecode.apps.ssh_support.lib.backends.hg import MercurialServer
26 26 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
27 27
28 28
class MercurialServerCreator(object):
    """
    Factory producing ready-to-use ``MercurialServer`` instances for the
    tests, backed by a mocked config object and dummy user/env.
    """
    root = '/tmp/repo/path/'
    hg_path = '/usr/local/bin/hg'

    config_data = {
        'app:main': {
            'ssh.executable.hg': hg_path,
            'vcs.hooks.protocol': 'http',
        }
    }
    repo_name = 'test_hg'
    user = plain_dummy_user()

    def __init__(self):
        # emulate config.get(section, key) backed by config_data above
        def config_get(part, key):
            return self.config_data.get(part, {}).get(key)
        self.config_mock = mock.Mock()
        self.config_mock.get = mock.Mock(side_effect=config_get)

    def create(self, **kwargs):
        """Build a MercurialServer; ``kwargs`` override any argument."""
        defaults = {
            'store': self.root,
            'ini_path': '',
            'user': self.user,
            'repo_name': self.repo_name,
            'user_permissions': {
                'test_hg': 'repository.admin'
            },
            'config': self.config_mock,
            'env': plain_dummy_env()
        }
        defaults.update(kwargs)
        return MercurialServer(**defaults)
63 63
64 64
@pytest.fixture()
def hg_server(app):
    # `app` fixture guarantees the application context is initialized
    return MercurialServerCreator()
68 68
69 69
class TestMercurialServer(object):
    """Tests for the SSH MercurialServer backend."""

    def test_command(self, hg_server, tmpdir):
        # hg is started in stdio-serve mode with HGRCPATH pointing at a
        # per-test hgrc file
        server = hg_server.create()
        custom_hgrc = os.path.join(str(tmpdir), 'hgrc')
        expected_command = (
            'cd {root}; HGRCPATH={custom_hgrc} {hg_path} -R {root}{repo_name} serve --stdio'.format(
                root=hg_server.root, custom_hgrc=custom_hgrc, hg_path=hg_server.hg_path,
                repo_name=hg_server.repo_name)
        )
        server_command = server.tunnel.command(custom_hgrc)
        assert expected_command == server_command

    @pytest.mark.parametrize('permissions, action, code', [
        ({}, 'pull', -2),
        ({'test_hg': 'repository.read'}, 'pull', 0),
        ({'test_hg': 'repository.read'}, 'push', -2),
        ({'test_hg': 'repository.write'}, 'push', 0),
        ({'test_hg': 'repository.admin'}, 'push', 0),

    ])
    def test_permission_checks(self, hg_server, permissions, action, code):
        # 0 == allowed, -2 == permission denied
        server = hg_server.create(user_permissions=permissions)
        result = server._check_permissions(action)
        assert result is code

    @pytest.mark.parametrize('permissions, value', [
        ({}, False),
        ({'test_hg': 'repository.read'}, False),
        ({'test_hg': 'repository.write'}, True),
        ({'test_hg': 'repository.admin'}, True),

    ])
    def test_has_write_permissions(self, hg_server, permissions, value):
        server = hg_server.create(user_permissions=permissions)
        result = server.has_write_perm()
        assert result is value

    def test_run_returns_executes_command(self, hg_server):
        server = hg_server.create()
        from rhodecode.apps.ssh_support.lib.backends.hg import MercurialTunnelWrapper
        # NOTE(review): mutates os.environ without cleanup; may leak into
        # other tests — consider monkeypatch.setenv instead
        os.environ['SSH_CLIENT'] = '127.0.0.1'
        with mock.patch.object(MercurialTunnelWrapper, 'create_hooks_env') as _patch:
            _patch.return_value = 0
            with mock.patch.object(MercurialTunnelWrapper, 'command', return_value='date'):
                exit_code = server.run()

        assert exit_code == (0, False)
117 118
118 119
119 120
@@ -1,206 +1,207 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20 import os
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.apps.ssh_support.lib.backends.svn import SubversionServer
25 25 from rhodecode.apps.ssh_support.tests.conftest import plain_dummy_env, plain_dummy_user
26 26
27 27
class SubversionServerCreator(object):
    """
    Factory producing ready-to-use ``SubversionServer`` instances for the
    tests, backed by a mocked config object and dummy user/env.
    """
    root = '/tmp/repo/path/'
    svn_path = '/usr/local/bin/svnserve'
    config_data = {
        'app:main': {
            'ssh.executable.svn': svn_path,
            'vcs.hooks.protocol': 'http',
        }
    }
    repo_name = 'test-svn'
    user = plain_dummy_user()

    def __init__(self):
        # emulate config.get(section, key) backed by config_data above
        def config_get(part, key):
            return self.config_data.get(part, {}).get(key)
        self.config_mock = mock.Mock()
        self.config_mock.get = mock.Mock(side_effect=config_get)

    def create(self, **kwargs):
        """Build a SubversionServer; ``kwargs`` override any argument."""
        defaults = {
            'store': self.root,
            'repo_name': self.repo_name,
            'ini_path': '',
            'user': self.user,
            'user_permissions': {
                self.repo_name: 'repository.admin'
            },
            'config': self.config_mock,
            'env': plain_dummy_env()
        }

        defaults.update(kwargs)
        return SubversionServer(**defaults)
62 62
63 63
@pytest.fixture()
def svn_server(app):
    # `app` fixture guarantees the application context is initialized
    return SubversionServerCreator()
67 67
68 68
class TestSubversionServer(object):
    """Tests for the SSH SubversionServer backend."""

    def test_command(self, svn_server):
        # svnserve tunnel-mode invocation with generated config file
        server = svn_server.create()
        expected_command = [
            svn_server.svn_path, '-t',
            '--config-file', server.tunnel.svn_conf_path,
            '--tunnel-user', svn_server.user.username,
            '-r', svn_server.root
        ]

        assert expected_command == server.tunnel.command()

    @pytest.mark.parametrize('permissions, action, code', [
        ({}, 'pull', -2),
        ({'test-svn': 'repository.read'}, 'pull', 0),
        ({'test-svn': 'repository.read'}, 'push', -2),
        ({'test-svn': 'repository.write'}, 'push', 0),
        ({'test-svn': 'repository.admin'}, 'push', 0),

    ])
    def test_permission_checks(self, svn_server, permissions, action, code):
        # 0 == allowed, -2 == permission denied
        server = svn_server.create(user_permissions=permissions)
        result = server._check_permissions(action)
        assert result is code

    @pytest.mark.parametrize('permissions, access_paths, expected_match', [
        # not matched repository name
        ({
            'test-svn': ''
        }, ['test-svn-1', 'test-svn-1/subpath'],
         None),

        # exact match
        ({
            'test-svn': ''
        },
         ['test-svn'],
         'test-svn'),

        # subdir commits
        ({
            'test-svn': ''
        },
         ['test-svn/foo',
          'test-svn/foo/test-svn',
          'test-svn/trunk/development.txt',
          ],
         'test-svn'),

        # subgroups + similar patterns
        ({
            'test-svn': '',
            'test-svn-1': '',
            'test-svn-subgroup/test-svn': '',

        },
         ['test-svn-1',
          'test-svn-1/foo/test-svn',
          'test-svn-1/test-svn',
          ],
         'test-svn-1'),

        # subgroups + similar patterns
        ({
            'test-svn-1': '',
            'test-svn-10': '',
            'test-svn-100': '',
        },
         ['test-svn-10',
          'test-svn-10/foo/test-svn',
          'test-svn-10/test-svn',
          ],
         'test-svn-10'),

        # subgroups + similar patterns
        ({
            'name': '',
            'nameContains': '',
            'nameContainsThis': '',
        },
         ['nameContains',
          'nameContains/This',
          'nameContains/This/test-svn',
          ],
         'nameContains'),

        # subgroups + similar patterns
        ({
            'test-svn': '',
            'test-svn-1': '',
            'test-svn-subgroup/test-svn': '',

        },
         ['test-svn-subgroup/test-svn',
          'test-svn-subgroup/test-svn/foo/test-svn',
          'test-svn-subgroup/test-svn/trunk/example.txt',
          ],
         'test-svn-subgroup/test-svn'),
    ])
    def test_repo_extraction_on_subdir(self, svn_server, permissions, access_paths, expected_match):
        # every access path inside a repo should resolve to that repo name
        server = svn_server.create(user_permissions=permissions)
        for path in access_paths:
            repo_name = server.tunnel._match_repo_name(path)
            assert repo_name == expected_match

    def test_run_returns_executes_command(self, svn_server):
        server = svn_server.create()
        from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
        # NOTE(review): mutates os.environ without cleanup; may leak into
        # other tests — consider monkeypatch.setenv instead
        os.environ['SSH_CLIENT'] = '127.0.0.1'
        with mock.patch.object(
                SubversionTunnelWrapper, 'get_first_client_response',
                return_value={'url': 'http://server/test-svn'}):
            with mock.patch.object(
                    SubversionTunnelWrapper, 'patch_first_client_response',
                    return_value=0):
                with mock.patch.object(
                        SubversionTunnelWrapper, 'sync',
                        return_value=0):
                    with mock.patch.object(
                            SubversionTunnelWrapper, 'command',
                            return_value=['date']):

                        exit_code = server.run()
                        # SVN has this differently configured, and we get in our mock env
                        # None as return code
                        assert exit_code == (None, False)

    def test_run_returns_executes_command_that_cannot_extract_repo_name(self, svn_server):
        server = svn_server.create()
        from rhodecode.apps.ssh_support.lib.backends.svn import SubversionTunnelWrapper
        with mock.patch.object(
                SubversionTunnelWrapper, 'command',
                return_value=['date']):
            with mock.patch.object(
                    SubversionTunnelWrapper, 'get_first_client_response',
                    return_value=None):
                exit_code = server.run()

        assert exit_code == (1, False)
@@ -1,146 +1,147 b''
1 1 # -*- coding: utf-8 -*-
2 2 # Copyright (C) 2016-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 """
21 21 us in hooks::
22 22
23 23 from .helpers import extract_pre_files
24 24 # returns list of dicts with key-val fetched from extra fields
25 25 file_list = extract_pre_files.run(**kwargs)
26 26
27 27 """
28 28 import re
29 29 import collections
30 30 import json
31 31
32 32 from rhodecode.lib import diffs
33 33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 34 from rhodecode.lib.vcs.backends.git.diff import GitDiff
35 from vcsserver.utils import safe_int
35 36
36 37
def get_svn_files(repo, vcs_repo, refs):
    """
    Build a list of changed-file entries for an in-flight SVN transaction.

    :param repo: repository object (provides ``repo_full_path``)
    :param vcs_repo: scm instance used to run ``svnlook`` commands
    :param refs: hook refs; ``refs[0]`` is the transaction id
    :returns: list of dicts with filename/operation/file_size (and empty
        raw_diff/chunks/ops placeholders)
    """
    txn_id = refs[0]
    files = []

    stdout, stderr = vcs_repo.run_svn_command(
        ['svnlook', 'changed', repo.repo_full_path, '--transaction', txn_id])

    # map svnlook status letters onto rhodecode operation codes
    svn_op_to_rc_op = {
        'A': 'A',
        'U': 'M',
        'D': 'D',
    }

    for entry in stdout.splitlines():
        parsed_entry = {
            'raw_diff': '',
            'filename': '',
            'chunks': [],
            'ops': {},
            'file_size': 0
        }

        # svnlook line format: status letter followed by the path
        op = entry[0]
        path = entry[1:].strip()

        rc_op = svn_op_to_rc_op.get(op) or '?'
        parsed_entry['filename'] = path
        parsed_entry['operation'] = rc_op

        if rc_op in ['A', 'M']:

            stdout, stderr = vcs_repo.run_svn_command(
                ['svnlook', 'filesize', repo.repo_full_path, path, '--transaction', txn_id],
                _safe=True
            )

            if "Path '{}' is not a file".format(path.rstrip('/')) in stderr:
                # skip dirs
                continue

            # safe_int guards against non-numeric svnlook output
            parsed_entry['file_size'] = safe_int(stdout.strip()) or 0

        files.append(parsed_entry)

    return files
82 83
83 84
def get_hg_files(repo, vcs_repo, refs):
    """
    Mercurial counterpart of ``get_svn_files``; file extraction is not
    implemented for hg, so this always yields an empty list.
    """
    return []
87 88
88 89
def get_git_files(repo, vcs_repo, refs):
    """
    Extract parsed diff entries for pushed git revisions.

    :param refs: list of dicts with 'old_rev', 'new_rev' and 'git_env' keys
    :returns: list of parsed-diff dicts (keys listed in the comment below)
    """
    files = []

    for data in refs:
        # we should now extract commit data
        old_rev = data['old_rev']
        new_rev = data['new_rev']

        if '00000000' in old_rev:
            # new branch, we don't need to extract nothing
            return files

        git_env = dict(data['git_env'])

        cmd = [
            'diff', old_rev, new_rev
        ]

        stdout, stderr = vcs_repo.run_git_command(cmd, extra_env=git_env)
        vcs_diff = GitDiff(stdout)

        diff_processor = diffs.DiffProcessor(vcs_diff, format='newdiff')
        # this is list of dicts with diff information
        # _parsed[0].keys()
        # ['raw_diff', 'old_revision', 'stats', 'original_filename',
        # 'is_limited_diff', 'chunks', 'new_revision', 'operation',
        # 'exceeds_limit', 'filename']
        # NOTE(review): `files` is overwritten on every iteration, so only
        # the last ref's diff survives — confirm whether `refs` can ever
        # hold more than one entry here
        files = _parsed = diff_processor.prepare()

    return files
119 120
120 121
def run(*args, **kwargs):
    """
    Hook entry point: dispatch on the repository's scm type and return the
    list of changed-file dicts for the incoming commits/transaction.

    :param kwargs: hook extras; must contain 'scm', 'repository' (or the
        temporary 'REPOSITORY') and 'commit_ids'
    """
    from rhodecode.model.db import Repository

    vcs_type = kwargs['scm']
    # use temp name then the main one propagated
    repo_name = kwargs.pop('REPOSITORY', None) or kwargs['repository']

    repo = Repository.get_by_repo_name(repo_name)
    vcs_repo = repo.scm_instance(cache=False)

    files = []

    if vcs_type == 'git':
        # NOTE(review): `new_environ` is computed but never used below —
        # confirm whether this loop is dead code
        for rev_data in kwargs['commit_ids']:
            new_environ = dict((k, v) for k, v in rev_data['git_env'])
        files = get_git_files(repo, vcs_repo, kwargs['commit_ids'])

    if vcs_type == 'hg':
        # NOTE(review): same unused `new_environ` pattern as the git branch
        for rev_data in kwargs['commit_ids']:
            new_environ = dict((k, v) for k, v in rev_data['hg_env'])
        files = get_hg_files(repo, vcs_repo, kwargs['commit_ids'])

    if vcs_type == 'svn':
        files = get_svn_files(repo, vcs_repo, kwargs['commit_ids'])

    return files
@@ -1,2511 +1,2513 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 authentication and permission libraries
23 23 """
24 24
25 25 import os
26 26
27 27 import colander
28 28 import time
29 29 import collections
30 30 import fnmatch
31 31 import hashlib
32 32 import itertools
33 33 import logging
34 34 import random
35 35 import traceback
36 36 from functools import wraps
37 37
38 38 import ipaddress
39 39
40 40 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
41 41 from sqlalchemy.orm.exc import ObjectDeletedError
42 42 from sqlalchemy.orm import joinedload
43 43 from zope.cachedescriptors.property import Lazy as LazyProperty
44 44
45 45 import rhodecode
46 46 from rhodecode.model import meta
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.user import UserModel
49 49 from rhodecode.model.db import (
50 50 false, User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
51 51 UserIpMap, UserApiKeys, RepoGroup, UserGroup, UserNotice)
52 52 from rhodecode.lib import rc_cache
53 53 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5, safe_int, sha1
54 54 from rhodecode.lib.utils import (
55 55 get_repo_slug, get_repo_group_slug, get_user_group_slug)
56 56 from rhodecode.lib.caching_query import FromCache
57 57
58 58 if rhodecode.is_unix:
59 59 import bcrypt
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63 csrf_token_key = "csrf_token"
64 64
65 65
class PasswordGenerator(object):
    """
    This is a simple class for generating password from different sets of
    characters
    usage::
        passwd_gen = PasswordGenerator()
        #print 8-letter password containing only big and small letters
        of alphabet
        passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
    """
    ALPHABETS_NUM = r'''1234567890'''
    ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
    ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
    ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
    ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
        + ALPHABETS_NUM + ALPHABETS_SPECIAL
    ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
    ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
    ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
    ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM

    def __init__(self, passwd=''):
        # last generated password, kept for convenience
        self.passwd = passwd

    def gen_password(self, length, type_=None):
        """
        Generate a random password of ``length`` characters drawn from the
        ``type_`` character set (defaults to ALPHABETS_FULL).

        :param length: number of characters to generate
        :param type_: iterable of candidate characters
        :returns: the generated password (also stored on ``self.passwd``)
        """
        if type_ is None:
            type_ = self.ALPHABETS_FULL
        # SECURITY: use the OS CSPRNG — the default `random` generator is
        # predictable and unsuitable for generating credentials
        rng = random.SystemRandom()
        self.passwd = ''.join([rng.choice(type_) for _ in range(length)])
        return self.passwd
95 95
96 96
97 97 class _RhodeCodeCryptoBase(object):
98 98 ENC_PREF = None
99 99
100 100 def hash_create(self, str_):
101 101 """
102 102 hash the string using
103 103
104 104 :param str_: password to hash
105 105 """
106 106 raise NotImplementedError
107 107
108 108 def hash_check_with_upgrade(self, password, hashed):
109 109 """
110 110 Returns tuple in which first element is boolean that states that
111 111 given password matches it's hashed version, and the second is new hash
112 112 of the password, in case this password should be migrated to new
113 113 cipher.
114 114 """
115 115 checked_hash = self.hash_check(password, hashed)
116 116 return checked_hash, None
117 117
118 118 def hash_check(self, password, hashed):
119 119 """
120 120 Checks matching password with it's hashed value.
121 121
122 122 :param password: password
123 123 :param hashed: password in hashed form
124 124 """
125 125 raise NotImplementedError
126 126
127 127 def _assert_bytes(self, value):
128 128 """
129 129 Passing in an `unicode` object can lead to hard to detect issues
130 130 if passwords contain non-ascii characters. Doing a type check
131 131 during runtime, so that such mistakes are detected early on.
132 132 """
133 133 if not isinstance(value, str):
134 134 raise TypeError(
135 135 "Bytestring required as input, got %r." % (value, ))
136 136
137 137
class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
    # known prefixes of bcrypt hashes produced with cost factor 10
    ENC_PREF = ('$2a$10', '$2b$10')

    def hash_create(self, str_):
        """
        Hash ``str_`` (bytestring) with bcrypt using a fresh salt and
        cost factor 10.
        """
        self._assert_bytes(str_)
        return bcrypt.hashpw(str_, bcrypt.gensalt(10))

    def hash_check_with_upgrade(self, password, hashed):
        """
        Returns tuple in which first element is boolean that states that
        given password matches it's hashed version, and the second is new hash
        of the password, in case this password should be migrated to new
        cipher.

        This implements special upgrade logic which works like that:
        - check if the given password == bcrypted hash, if yes then we
          properly used password and it was already in bcrypt. Proceed
          without any changes
        - if bcrypt hash check is not working try with sha256. If hash compare
          is ok, it means we using correct but old hashed password. indicate
          hash change and proceed
        """

        new_hash = None

        # regular pw check
        password_match_bcrypt = self.hash_check(password, hashed)

        # now we want to know if the password was maybe from sha256
        # basically calling _RhodeCodeCryptoSha256().hash_check()
        if not password_match_bcrypt:
            if _RhodeCodeCryptoSha256().hash_check(password, hashed):
                new_hash = self.hash_create(password)  # make new bcrypt hash
                password_match_bcrypt = True

        return password_match_bcrypt, new_hash

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        try:
            return bcrypt.hashpw(password, hashed) == hashed
        except ValueError as e:
            # we're having a invalid salt here probably, we should not crash
            # just return with False as it would be a wrong password.
            log.debug('Failed to check password hash using bcrypt %s',
                      safe_str(e))

        return False
192 192
193 193
class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
    # legacy scheme: plain (unsalted) sha256 hex digests
    ENC_PREF = '_'

    def hash_create(self, str_):
        """
        Hash ``str_`` (bytestring) as a sha256 hex digest.
        """
        self._assert_bytes(str_)
        return hashlib.sha256(str_).hexdigest()

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        return hashlib.sha256(password).hexdigest() == hashed
210 210
211 211
class _RhodeCodeCryptoTest(_RhodeCodeCryptoBase):
    # test-only scheme: plain sha1, cheap to compute (see crypto_backend)
    ENC_PREF = '_'

    def hash_create(self, str_):
        """
        Hash ``str_`` (bytestring) with plain sha1 — test use only.
        """
        self._assert_bytes(str_)
        return sha1(str_)

    def hash_check(self, password, hashed):
        """
        Checks matching password with it's hashed value.

        :param password: password
        :param hashed: password in hashed form
        """
        self._assert_bytes(password)
        return sha1(password) == hashed
228 228
229 229
def crypto_backend():
    """
    Return the active crypto backend.

    Test runs get the cheap sha1-test backend to keep them fast; everything
    else uses BCrypt, which is intentionally expensive to calculate.
    """
    if rhodecode.is_test:
        return _RhodeCodeCryptoTest()
    return _RhodeCodeCryptoBCrypt()
243 243
244 244
def get_crypt_password(password):
    """
    Create the hash of `password` with the active crypto backend.

    :param password: The cleartext password.
    :type password: unicode
    """
    # normalize to a bytestring first; backends reject unicode input
    password = safe_str(password)
    return crypto_backend().hash_create(password)
254 254
255 255
def check_password(password, hashed):
    """
    Check if the value in `password` matches the hash in `hashed`.

    :param password: The cleartext password.
    :type password: unicode

    :param hashed: The expected hashed version of the password.
    :type hashed: The hash has to be passed in in text representation.
    """
    # normalize to a bytestring first; backends reject unicode input
    password = safe_str(password)
    return crypto_backend().hash_check(password, hashed)
268 268
269 269
def generate_auth_token(data, salt=None):
    """
    Generates API KEY from given string

    :param data: seed string for the token
    :param salt: optional salt; 16 random bytes are drawn when omitted
    :returns: sha1 hexdigest of data+salt
    """

    if salt is None:
        salt = os.urandom(16)
    # NOTE(review): concatenating safe_str() output with os.urandom() bytes
    # only works on Python 2 bytestrings — revisit for Python 3
    return hashlib.sha1(safe_str(data) + salt).hexdigest()
278 278
279 279
def get_came_from(request):
    """
    Build the redirect target (path plus query string) for ``request``,
    first removing any ``auth_token`` query parameter so it is never
    leaked through a redirect location.
    """
    came_from = request.path

    # sanitize the request and remove auth_token for redirection
    if 'auth_token' in request.GET:
        request.GET.pop('auth_token')

    query = request.query_string
    if query:
        came_from = came_from + '?' + query

    return came_from
295 295
296 296
class CookieStoreWrapper(object):
    """
    Uniform ``.get`` accessor over a cookie store that may be either a
    plain dict or an ``AuthUser`` instance (whose instance attributes act
    as the store).
    """

    def __init__(self, cookie_store):
        self.cookie_store = cookie_store

    def __repr__(self):
        return 'CookieStore<%s>' % (self.cookie_store)

    def get(self, key, other=None):
        store = self.cookie_store
        if isinstance(store, dict):
            return store.get(key, other)
        if isinstance(store, AuthUser):
            # AuthUser exposes its state as plain instance attributes
            return store.__dict__.get(key, other)
310 310
311 311
def _cached_perms_data(user_id, scope, user_is_admin,
                       user_inherit_default_permissions, explicit, algo,
                       calculate_super_admin):
    """
    Compute the full permission structure for a user by delegating to
    :class:`PermissionCalculator`. Kept as a module-level function so the
    result can be wrapped by caching layers.
    """
    calculator = PermissionCalculator(
        user_id, scope, user_is_admin, user_inherit_default_permissions,
        explicit, algo, calculate_super_admin)
    return calculator.calculate()
320 320
321 321
class PermOrigin(object):
    # Symbolic labels recording WHERE a calculated permission came from
    # (super-admin override, ownership, defaults, explicit user/user-group
    # grants). The '%s' templates are filled with the user / user-group name.
    SUPER_ADMIN = 'superadmin'
    ARCHIVED = 'archived'

    # repository-level origins
    REPO_USER = 'user:%s'
    REPO_USERGROUP = 'usergroup:%s'
    REPO_OWNER = 'repo.owner'
    REPO_DEFAULT = 'repo.default'
    REPO_DEFAULT_NO_INHERIT = 'repo.default.no.inherit'
    REPO_PRIVATE = 'repo.private'

    # repository-group-level origins
    REPOGROUP_USER = 'user:%s'
    REPOGROUP_USERGROUP = 'usergroup:%s'
    REPOGROUP_OWNER = 'group.owner'
    REPOGROUP_DEFAULT = 'group.default'
    REPOGROUP_DEFAULT_NO_INHERIT = 'group.default.no.inherit'

    # user-group-level origins
    USERGROUP_USER = 'user:%s'
    USERGROUP_USERGROUP = 'usergroup:%s'
    USERGROUP_OWNER = 'usergroup.owner'
    USERGROUP_DEFAULT = 'usergroup.default'
    USERGROUP_DEFAULT_NO_INHERIT = 'usergroup.default.no.inherit'
344 344
345 345
class PermOriginDict(dict):
    """
    A special dict used for tracking permissions along with their origins.

    `__setitem__` has been overridden to expect a tuple(perm, origin)
    `__getitem__` will return only the perm
    `.perm_origin_stack` will return the stack of (perm, origin) set per key

    >>> perms = PermOriginDict()
    >>> perms['resource'] = 'read', 'default', 1
    >>> perms['resource']
    'read'
    >>> perms['resource'] = 'write', 'admin', 2
    >>> perms['resource']
    'write'
    >>> perms.perm_origin_stack
    {'resource': [('read', 'default', 1), ('write', 'admin', 2)]}
    """

    def __init__(self, *args, **kw):
        dict.__init__(self, *args, **kw)
        # ordered history of every (perm, origin, obj_id) ever set per key
        self.perm_origin_stack = collections.OrderedDict()

    def __setitem__(self, key, perm_origin_obj_id):
        # NOTE: unpack inside the body instead of in the signature — tuple
        # parameter unpacking is Python 2 only (removed by PEP 3113), so
        # this keeps the class importable under Python 3 as well.
        (perm, origin, obj_id) = perm_origin_obj_id
        self.perm_origin_stack.setdefault(key, []).append((perm, origin, obj_id))
        dict.__setitem__(self, key, perm)
372 372
373 373
class BranchPermOriginDict(PermOriginDict):
    """
    Dedicated branch permissions dict, with tracking of patterns and origins.

    >>> perms = BranchPermOriginDict()
    >>> perms['resource'] = '*pattern', 'read', 'default'
    >>> perms['resource']
    {'*pattern': 'read'}
    >>> perms['resource'] = '*pattern', 'write', 'admin'
    >>> perms['resource']
    {'*pattern': 'write'}
    >>> perms.perm_origin_stack
    {'resource': {'*pattern': [('read', 'default'), ('write', 'admin')]}}
    """
    def __setitem__(self, key, pattern_perm_origin):
        # NOTE: unpack in the body — tuple parameter unpacking in signatures
        # is Python 2 only (removed by PEP 3113); this form works on 2 and 3.
        (pattern, perm, origin) = pattern_perm_origin

        self.perm_origin_stack.setdefault(key, {}) \
            .setdefault(pattern, []).append((perm, origin))

        if key in self:
            # existing repo entry: update/overwrite this pattern only
            self[key].__setitem__(pattern, perm)
        else:
            # first pattern for this repo: start an ordered pattern map
            patterns = collections.OrderedDict()
            patterns[pattern] = perm
            dict.__setitem__(self, key, patterns)
399 399
400 400
class PermissionCalculator(object):
    """
    Computes the effective permission structure for a single user:
    global flags plus per-repository, per-repository-group, per-user-group
    and per-repository-branch permissions. Defaults from the `default`
    user are applied first, then user-group grants, then explicit user
    grants, so later writes override earlier ones (PermOriginDict keeps
    the full override history per key).
    """

    def __init__(
            self, user_id, scope, user_is_admin,
            user_inherit_default_permissions, explicit, algo,
            calculate_super_admin_as_user=False):
        # `scope` (dict) narrows the DB queries to single object ids;
        # empty/None scope calculates permissions for all objects.

        self.user_id = user_id
        self.user_is_admin = user_is_admin
        self.inherit_default_permissions = user_inherit_default_permissions
        self.explicit = explicit
        # conflict resolution strategy: 'higherwin' or 'lowerwin',
        # see _choose_permission
        self.algo = algo
        self.calculate_super_admin_as_user = calculate_super_admin_as_user

        scope = scope or {}
        self.scope_repo_id = scope.get('repo_id')
        self.scope_repo_group_id = scope.get('repo_group_id')
        self.scope_user_group_id = scope.get('user_group_id')

        self.default_user_id = User.get_default_user(cache=True).user_id

        # accumulators; PermOriginDict tracks (perm, origin, obj_id) history
        self.permissions_repositories = PermOriginDict()
        self.permissions_repository_groups = PermOriginDict()
        self.permissions_user_groups = PermOriginDict()
        self.permissions_repository_branches = BranchPermOriginDict()
        self.permissions_global = set()

        # baseline permissions of the `default` user
        self.default_repo_perms = Permission.get_default_repo_perms(
            self.default_user_id, self.scope_repo_id)
        self.default_repo_groups_perms = Permission.get_default_group_perms(
            self.default_user_id, self.scope_repo_group_id)
        self.default_user_group_perms = \
            Permission.get_default_user_group_perms(
                self.default_user_id, self.scope_user_group_id)

        # default branch perms
        self.default_branch_repo_perms = \
            Permission.get_default_repo_branch_perms(
                self.default_user_id, self.scope_repo_id)

    def calculate(self):
        # Entry point. Super-admins take a shortcut unless explicitly
        # asked to be calculated as a regular user.
        if self.user_is_admin and not self.calculate_super_admin_as_user:
            return self._calculate_super_admin_permissions()

        # order matters: globals first, then defaults, then explicit grants
        self._calculate_global_default_permissions()
        self._calculate_global_permissions()
        self._calculate_default_permissions()
        self._calculate_repository_permissions()
        self._calculate_repository_branch_permissions()
        self._calculate_repository_group_permissions()
        self._calculate_user_group_permissions()
        return self._permission_structure()

    def _calculate_super_admin_permissions(self):
        """
        super-admin user have all default rights for repositories
        and groups set to admin
        """
        self.permissions_global.add('hg.admin')
        self.permissions_global.add('hg.create.write_on_repogroup.true')

        # repositories
        for perm in self.default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            obj_id = perm.UserRepoToPerm.repository.repo_id
            archived = perm.UserRepoToPerm.repository.archived
            p = 'repository.admin'
            self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN, obj_id
            # special case for archived repositories, which we block still even for
            # super admins
            if archived:
                p = 'repository.read'
                self.permissions_repositories[r_k] = p, PermOrigin.ARCHIVED, obj_id

        # repository groups
        for perm in self.default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            obj_id = perm.UserRepoGroupToPerm.group.group_id
            p = 'group.admin'
            self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN, obj_id

        # user groups
        for perm in self.default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
            p = 'usergroup.admin'
            self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN, obj_id

        # branch permissions
        # since super-admin also can have custom rule permissions
        # we *always* need to calculate those inherited from default, and also explicit
        self._calculate_default_permissions_repository_branches(
            user_inherit_object_permissions=False)
        self._calculate_repository_branch_permissions()

        return self._permission_structure()

    def _calculate_global_default_permissions(self):
        """
        global permissions taken from the default user
        """
        default_global_perms = UserToPerm.query()\
            .filter(UserToPerm.user_id == self.default_user_id)\
            .options(joinedload(UserToPerm.permission))

        for perm in default_global_perms:
            self.permissions_global.add(perm.permission.permission_name)

        if self.user_is_admin:
            self.permissions_global.add('hg.admin')
            self.permissions_global.add('hg.create.write_on_repogroup.true')

    def _calculate_global_permissions(self):
        """
        Set global system permissions with user permissions or permissions
        taken from the user groups of the current user.

        The permissions include repo creating, repo group creating, forking
        etc.
        """

        # now we read the defined permissions and overwrite what we have set
        # before those can be configured from groups or users explicitly.

        # In case we want to extend this list we should make sure
        # this is in sync with User.DEFAULT_USER_PERMISSIONS definitions
        from rhodecode.model.permission import PermissionModel

        _configurable = frozenset([
            PermissionModel.FORKING_DISABLED, PermissionModel.FORKING_ENABLED,
            'hg.create.none', 'hg.create.repository',
            'hg.usergroup.create.false', 'hg.usergroup.create.true',
            'hg.repogroup.create.false', 'hg.repogroup.create.true',
            'hg.create.write_on_repogroup.false', 'hg.create.write_on_repogroup.true',
            'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
        ])

        # USER GROUPS comes first user group global permissions
        user_perms_from_users_groups = Session().query(UserGroupToPerm)\
            .options(joinedload(UserGroupToPerm.permission))\
            .join((UserGroupMember, UserGroupToPerm.users_group_id ==
                   UserGroupMember.users_group_id))\
            .filter(UserGroupMember.user_id == self.user_id)\
            .order_by(UserGroupToPerm.users_group_id)\
            .all()

        # need to group here by groups since user can be in more than
        # one group, so we get all groups
        _explicit_grouped_perms = [
            [x, list(y)] for x, y in
            itertools.groupby(user_perms_from_users_groups,
                              lambda _x: _x.users_group)]

        for gr, perms in _explicit_grouped_perms:
            # since user can be in multiple groups iterate over them and
            # select the lowest permissions first (more explicit)
            # TODO(marcink): do this^^

            # group doesn't inherit default permissions so we actually set them
            if not gr.inherit_default_permissions:
                # NEED TO IGNORE all previously set configurable permissions
                # and replace them with explicitly set from this user
                # group permissions
                self.permissions_global = self.permissions_global.difference(
                    _configurable)
            for perm in perms:
                self.permissions_global.add(perm.permission.permission_name)

        # user explicit global permissions
        user_perms = Session().query(UserToPerm)\
            .options(joinedload(UserToPerm.permission))\
            .filter(UserToPerm.user_id == self.user_id).all()

        if not self.inherit_default_permissions:
            # NEED TO IGNORE all configurable permissions and
            # replace them with explicitly set from this user permissions
            self.permissions_global = self.permissions_global.difference(
                _configurable)
            for perm in user_perms:
                self.permissions_global.add(perm.permission.permission_name)

    def _calculate_default_permissions_repositories(self, user_inherit_object_permissions):
        # Apply `default` user repository permissions, then override for
        # no-inherit, private repos, ownership and admin status (in order).
        for perm in self.default_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            obj_id = perm.UserRepoToPerm.repository.repo_id
            archived = perm.UserRepoToPerm.repository.archived
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_DEFAULT
            self.permissions_repositories[r_k] = p, o, obj_id

            # if we decide this user isn't inheriting permissions from
            # default user we set him to .none so only explicit
            # permissions work
            if not user_inherit_object_permissions:
                p = 'repository.none'
                o = PermOrigin.REPO_DEFAULT_NO_INHERIT
                self.permissions_repositories[r_k] = p, o, obj_id

            if perm.Repository.private and not (
                    perm.Repository.user_id == self.user_id):
                # disable defaults for private repos,
                p = 'repository.none'
                o = PermOrigin.REPO_PRIVATE
                self.permissions_repositories[r_k] = p, o, obj_id

            elif perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o, obj_id

            # finally in case of archived repositories, we downgrade higher
            # permissions to read
            if archived:
                current_perm = self.permissions_repositories[r_k]
                if current_perm in ['repository.write', 'repository.admin']:
                    p = 'repository.read'
                    o = PermOrigin.ARCHIVED
                    self.permissions_repositories[r_k] = p, o, obj_id

    def _calculate_default_permissions_repository_branches(self, user_inherit_object_permissions):
        # Apply `default` user branch-pattern permissions per repository.
        for perm in self.default_branch_repo_perms:

            r_k = perm.UserRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                # merge with any previously registered perm for this pattern
                cur_perm = self.permissions_repository_branches.get(r_k)
                if cur_perm:
                    cur_perm = cur_perm[pattern]
                cur_perm = cur_perm or 'branch.none'

                p = self._choose_permission(p, cur_perm)

            # NOTE(marcink): register all pattern/perm instances in this
            # special dict that aggregates entries
            self.permissions_repository_branches[r_k] = pattern, p, o

    def _calculate_default_permissions_repository_groups(self, user_inherit_object_permissions):
        # Apply `default` user repository-group permissions, then override
        # for no-inherit, ownership and admin status (in order).
        for perm in self.default_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            obj_id = perm.UserRepoGroupToPerm.group.group_id
            p = perm.Permission.permission_name
            o = PermOrigin.REPOGROUP_DEFAULT
            self.permissions_repository_groups[rg_k] = p, o, obj_id

            # if we decide this user isn't inheriting permissions from default
            # user we set him to .none so only explicit permissions work
            if not user_inherit_object_permissions:
                p = 'group.none'
                o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
                self.permissions_repository_groups[rg_k] = p, o, obj_id

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o, obj_id

    def _calculate_default_permissions_user_groups(self, user_inherit_object_permissions):
        # Apply `default` user user-group permissions, then override for
        # no-inherit, ownership and admin status (in order).
        for perm in self.default_user_group_perms:
            u_k = perm.UserUserGroupToPerm.user_group.users_group_name
            obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
            p = perm.Permission.permission_name
            o = PermOrigin.USERGROUP_DEFAULT
            self.permissions_user_groups[u_k] = p, o, obj_id

            # if we decide this user isn't inheriting permissions from default
            # user we set him to .none so only explicit permissions work
            if not user_inherit_object_permissions:
                p = 'usergroup.none'
                o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
                self.permissions_user_groups[u_k] = p, o, obj_id

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[u_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[u_k] = p, o, obj_id

    def _calculate_default_permissions(self):
        """
        Set default user permissions for repositories, repository branches,
        repository groups, user groups taken from the default user.

        Calculate inheritance of object permissions based on what we have now
        in GLOBAL permissions. We check if .false is in GLOBAL since this is
        explicitly set. Inherit is the opposite of .false being there.

        .. note::

           the syntax is little bit odd but what we need to check here is
           the opposite of .false permission being in the list so even for
           inconsistent state when both .true/.false is there
           .false is more important

        """
        user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
                                               in self.permissions_global)

        # default permissions inherited from `default` user permissions
        self._calculate_default_permissions_repositories(
            user_inherit_object_permissions)

        self._calculate_default_permissions_repository_branches(
            user_inherit_object_permissions)

        self._calculate_default_permissions_repository_groups(
            user_inherit_object_permissions)

        self._calculate_default_permissions_user_groups(
            user_inherit_object_permissions)

    def _calculate_repository_permissions(self):
        """
        Repository access permissions for the current user.

        Check if the user is part of user groups for this repository and
        fill in the permission from it. `_choose_permission` decides of which
        permission should be selected based on selected method.
        """

        # user group for repositories permissions
        user_repo_perms_from_user_group = Permission\
            .get_default_repo_perms_from_user_group(
                self.user_id, self.scope_repo_id)

        # counts repeated repo keys so overlapping user-group grants get
        # merged via _choose_permission instead of blindly overwritten
        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_perms_from_user_group:
            r_k = perm.UserGroupRepoToPerm.repository.repo_name
            obj_id = perm.UserGroupRepoToPerm.repository.repo_id
            multiple_counter[r_k] += 1
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
                .users_group.users_group_name

            if multiple_counter[r_k] > 1:
                cur_perm = self.permissions_repositories[r_k]
                p = self._choose_permission(p, cur_perm)

            self.permissions_repositories[r_k] = p, o, obj_id

            if perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o, obj_id

        # user explicit permissions for repositories, overrides any specified
        # by the group permission
        user_repo_perms = Permission.get_default_repo_perms(
            self.user_id, self.scope_repo_id)
        for perm in user_repo_perms:
            r_k = perm.UserRepoToPerm.repository.repo_name
            obj_id = perm.UserRepoToPerm.repository.repo_id
            archived = perm.UserRepoToPerm.repository.archived
            p = perm.Permission.permission_name
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                cur_perm = self.permissions_repositories.get(
                    r_k, 'repository.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_repositories[r_k] = p, o, obj_id

            if perm.Repository.user_id == self.user_id:
                # set admin if owner
                p = 'repository.admin'
                o = PermOrigin.REPO_OWNER
                self.permissions_repositories[r_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'repository.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repositories[r_k] = p, o, obj_id

            # finally in case of archived repositories, we downgrade higher
            # permissions to read
            if archived:
                current_perm = self.permissions_repositories[r_k]
                if current_perm in ['repository.write', 'repository.admin']:
                    p = 'repository.read'
                    o = PermOrigin.ARCHIVED
                    self.permissions_repositories[r_k] = p, o, obj_id

    def _calculate_repository_branch_permissions(self):
        # Branch-pattern permissions: user-group grants first, then explicit
        # user grants which override them.
        # user group for repositories permissions
        user_repo_branch_perms_from_user_group = Permission\
            .get_default_repo_branch_perms_from_user_group(
                self.user_id, self.scope_repo_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_branch_perms_from_user_group:
            r_k = perm.UserGroupRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserGroupToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
                .users_group.users_group_name

            multiple_counter[r_k] += 1
            if multiple_counter[r_k] > 1:
                cur_perm = self.permissions_repository_branches[r_k][pattern]
                p = self._choose_permission(p, cur_perm)

            self.permissions_repository_branches[r_k] = pattern, p, o

        # user explicit branch permissions for repositories, overrides
        # any specified by the group permission
        user_repo_branch_perms = Permission.get_default_repo_branch_perms(
            self.user_id, self.scope_repo_id)

        for perm in user_repo_branch_perms:

            r_k = perm.UserRepoToPerm.repository.repo_name
            p = perm.Permission.permission_name
            pattern = perm.UserToRepoBranchPermission.branch_pattern
            o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username

            if not self.explicit:
                cur_perm = self.permissions_repository_branches.get(r_k)
                if cur_perm:
                    cur_perm = cur_perm[pattern]
                cur_perm = cur_perm or 'branch.none'
                p = self._choose_permission(p, cur_perm)

            # NOTE(marcink): register all pattern/perm instances in this
            # special dict that aggregates entries
            self.permissions_repository_branches[r_k] = pattern, p, o

    def _calculate_repository_group_permissions(self):
        """
        Repository group permissions for the current user.

        Check if the user is part of user groups for repository groups and
        fill in the permissions from it. `_choose_permission` decides of which
        permission should be selected based on selected method.
        """
        # user group for repo groups permissions
        user_repo_group_perms_from_user_group = Permission\
            .get_default_group_perms_from_user_group(
                self.user_id, self.scope_repo_group_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_repo_group_perms_from_user_group:
            rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
            obj_id = perm.UserGroupRepoGroupToPerm.group.group_id
            multiple_counter[rg_k] += 1
            o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
                .users_group.users_group_name
            p = perm.Permission.permission_name

            if multiple_counter[rg_k] > 1:
                cur_perm = self.permissions_repository_groups[rg_k]
                p = self._choose_permission(p, cur_perm)
            self.permissions_repository_groups[rg_k] = p, o, obj_id

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner, even for member of other user group
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o, obj_id

        # user explicit permissions for repository groups
        user_repo_groups_perms = Permission.get_default_group_perms(
            self.user_id, self.scope_repo_group_id)
        for perm in user_repo_groups_perms:
            rg_k = perm.UserRepoGroupToPerm.group.group_name
            obj_id = perm.UserRepoGroupToPerm.group.group_id
            o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
                .user.username
            p = perm.Permission.permission_name

            if not self.explicit:
                cur_perm = self.permissions_repository_groups.get(rg_k, 'group.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_repository_groups[rg_k] = p, o, obj_id

            if perm.RepoGroup.user_id == self.user_id:
                # set admin if owner
                p = 'group.admin'
                o = PermOrigin.REPOGROUP_OWNER
                self.permissions_repository_groups[rg_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'group.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_repository_groups[rg_k] = p, o, obj_id

    def _calculate_user_group_permissions(self):
        """
        User group permissions for the current user.
        """
        # user group for user group permissions
        user_group_from_user_group = Permission\
            .get_default_user_group_perms_from_user_group(
                self.user_id, self.scope_user_group_id)

        multiple_counter = collections.defaultdict(int)
        for perm in user_group_from_user_group:
            ug_k = perm.UserGroupUserGroupToPerm.target_user_group.users_group_name
            obj_id = perm.UserGroupUserGroupToPerm.target_user_group.users_group_id
            multiple_counter[ug_k] += 1
            o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
                .user_group.users_group_name
            p = perm.Permission.permission_name

            if multiple_counter[ug_k] > 1:
                cur_perm = self.permissions_user_groups[ug_k]
                p = self._choose_permission(p, cur_perm)

            self.permissions_user_groups[ug_k] = p, o, obj_id

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner, even for member of other user group
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[ug_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[ug_k] = p, o, obj_id

        # user explicit permission for user groups
        user_user_groups_perms = Permission.get_default_user_group_perms(
            self.user_id, self.scope_user_group_id)
        for perm in user_user_groups_perms:
            ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
            obj_id = perm.UserUserGroupToPerm.user_group.users_group_id
            o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
                .user.username
            p = perm.Permission.permission_name

            if not self.explicit:
                cur_perm = self.permissions_user_groups.get(ug_k, 'usergroup.none')
                p = self._choose_permission(p, cur_perm)

            self.permissions_user_groups[ug_k] = p, o, obj_id

            if perm.UserGroup.user_id == self.user_id:
                # set admin if owner
                p = 'usergroup.admin'
                o = PermOrigin.USERGROUP_OWNER
                self.permissions_user_groups[ug_k] = p, o, obj_id

            if self.user_is_admin:
                p = 'usergroup.admin'
                o = PermOrigin.SUPER_ADMIN
                self.permissions_user_groups[ug_k] = p, o, obj_id

    def _choose_permission(self, new_perm, cur_perm):
        # Resolve a conflict between two permissions using the numeric
        # weights defined on Permission and the configured algorithm.
        # NOTE(review): implicitly returns None for an unknown `algo` —
        # presumably only 'higherwin'/'lowerwin' ever reach here; confirm.
        new_perm_val = Permission.PERM_WEIGHTS[new_perm]
        cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
        if self.algo == 'higherwin':
            if new_perm_val > cur_perm_val:
                return new_perm
            return cur_perm
        elif self.algo == 'lowerwin':
            if new_perm_val < cur_perm_val:
                return new_perm
            return cur_perm

    def _permission_structure(self):
        # final aggregated structure handed back to callers
        return {
            'global': self.permissions_global,
            'repositories': self.permissions_repositories,
            'repository_branches': self.permissions_repository_branches,
            'repositories_groups': self.permissions_repository_groups,
            'user_groups': self.permissions_user_groups,
        }
998 1000
999 1001
def allowed_auth_token_access(view_name, auth_token, whitelist=None):
    """
    Check if given controller_name is in whitelist of auth token access
    """
    if not whitelist:
        from rhodecode import CONFIG
        whitelist = aslist(
            CONFIG.get('api_access_controllers_whitelist'), sep=',')
    # backward compat translation
    compat = {
        # old controller, new VIEW
        'ChangesetController:*': 'RepoCommitsView:*',
        'ChangesetController:changeset_patch': 'RepoCommitsView:repo_commit_patch',
        'ChangesetController:changeset_raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:raw': 'RepoCommitsView:repo_commit_raw',
        'FilesController:archivefile': 'RepoFilesView:repo_archivefile',
        'GistsController:*': 'GistView:*',
    }

    log.debug(
        'Allowed views for AUTH TOKEN access: %s', whitelist)

    auth_token_access_valid = False
    for entry in whitelist:
        # translate from old Controllers to Pyramid Views
        entry = compat.get(entry, entry)

        token_match = True
        if '@' in entry:
            # specific AuthToken
            entry, allowed_token = entry.split('@', 1)
            token_match = auth_token == allowed_token

        if fnmatch.fnmatch(view_name, entry) and token_match:
            auth_token_access_valid = True
            break

    if auth_token_access_valid:
        log.debug('view: `%s` matches entry in whitelist: %s',
                  view_name, whitelist)
    else:
        msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
               % (view_name, whitelist))
        if auth_token:
            # if we use auth token key and don't have access it's a warning
            log.warning(msg)
        else:
            log.debug(msg)

    return auth_token_access_valid
1052 1054
1053 1055
1054 1056 class AuthUser(object):
1055 1057 """
1056 1058 A simple object that handles all attributes of user in RhodeCode
1057 1059
1058 1060 It does lookup based on API key,given user, or user present in session
1059 1061 Then it fills all required information for such user. It also checks if
1060 1062 anonymous access is enabled and if so, it returns default user as logged in
1061 1063 """
1062 1064 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
1063 1065 repo_read_perms = ['repository.read', 'repository.admin', 'repository.write']
1064 1066 repo_group_read_perms = ['group.read', 'group.write', 'group.admin']
1065 1067 user_group_read_perms = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
1066 1068
    def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
        # Identity hints used for lookup; actual resolution/filling of the
        # user data happens in propagate_data() below.
        self.user_id = user_id
        self._api_key = api_key

        # defaults, overwritten by propagate_data() for a resolved user
        self.api_key = None
        self.username = username
        self.ip_addr = ip_addr
        self.name = ''
        self.lastname = ''
        self.first_name = ''
        self.last_name = ''
        self.email = ''
        self.is_authenticated = False
        self.admin = False
        self.inherit_default_permissions = False
        self.password = ''

        self.anonymous_user = None  # propagated on propagate_data
        self.propagate_data()
        self._instance = None
        self._permissions_scoped_cache = {}  # used to bind scoped calculation
1089 1091
1090 1092 @LazyProperty
1091 1093 def permissions(self):
1092 1094 return self.get_perms(user=self, cache=None)
1093 1095
1094 1096 @LazyProperty
1095 1097 def permissions_safe(self):
1096 1098 """
1097 1099 Filtered permissions excluding not allowed repositories
1098 1100 """
1099 1101 perms = self.get_perms(user=self, cache=None)
1100 1102
1101 1103 perms['repositories'] = {
1102 1104 k: v for k, v in perms['repositories'].items()
1103 1105 if v != 'repository.none'}
1104 1106 perms['repositories_groups'] = {
1105 1107 k: v for k, v in perms['repositories_groups'].items()
1106 1108 if v != 'group.none'}
1107 1109 perms['user_groups'] = {
1108 1110 k: v for k, v in perms['user_groups'].items()
1109 1111 if v != 'usergroup.none'}
1110 1112 perms['repository_branches'] = {
1111 1113 k: v for k, v in perms['repository_branches'].iteritems()
1112 1114 if v != 'branch.none'}
1113 1115 return perms
1114 1116
1115 1117 @LazyProperty
1116 1118 def permissions_full_details(self):
1117 1119 return self.get_perms(
1118 1120 user=self, cache=None, calculate_super_admin=True)
1119 1121
1120 1122 def permissions_with_scope(self, scope):
1121 1123 """
1122 1124 Call the get_perms function with scoped data. The scope in that function
1123 1125 narrows the SQL calls to the given ID of objects resulting in fetching
1124 1126 Just particular permission we want to obtain. If scope is an empty dict
1125 1127 then it basically narrows the scope to GLOBAL permissions only.
1126 1128
1127 1129 :param scope: dict
1128 1130 """
1129 1131 if 'repo_name' in scope:
1130 1132 obj = Repository.get_by_repo_name(scope['repo_name'])
1131 1133 if obj:
1132 1134 scope['repo_id'] = obj.repo_id
1133 1135 _scope = collections.OrderedDict()
1134 1136 _scope['repo_id'] = -1
1135 1137 _scope['user_group_id'] = -1
1136 1138 _scope['repo_group_id'] = -1
1137 1139
1138 1140 for k in sorted(scope.keys()):
1139 1141 _scope[k] = scope[k]
1140 1142
1141 1143 # store in cache to mimic how the @LazyProperty works,
1142 1144 # the difference here is that we use the unique key calculated
1143 1145 # from params and values
1144 1146 return self.get_perms(user=self, cache=None, scope=_scope)
1145 1147
    def get_instance(self):
        # fetch a fresh database User object backing this AuthUser
        return User.get(self.user_id)

    def propagate_data(self):
        """
        Fills in user data and propagates values to this instance. Maps fetched
        user attributes to this class instance attributes.

        Lookup order: user_id, then API key, then username. If none succeeds,
        falls back to the anonymous/default user when it is active; otherwise
        the instance is deliberately left in a "corrupted" unauthenticated
        state.
        """
        log.debug('AuthUser: starting data propagation for new potential user')
        user_model = UserModel()
        anon_user = self.anonymous_user = User.get_default_user(cache=True)
        is_user_loaded = False

        # lookup by userid
        if self.user_id is not None and self.user_id != anon_user.user_id:
            log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id)
            is_user_loaded = user_model.fill_data(self, user_id=self.user_id)

        # try go get user by api key
        elif self._api_key and self._api_key != anon_user.api_key:
            log.debug('Trying Auth User lookup by API KEY: `...%s`', self._api_key[-4:])
            is_user_loaded = user_model.fill_data(self, api_key=self._api_key)

        # lookup by username
        elif self.username:
            log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username)
            is_user_loaded = user_model.fill_data(self, username=self.username)
        else:
            log.debug('No data in %s that could been used to log in', self)

        if not is_user_loaded:
            log.debug(
                'Failed to load user. Fallback to default user %s', anon_user)
            # if we cannot authenticate user try anonymous
            if anon_user.active:
                log.debug('default user is active, using it as a session user')
                user_model.fill_data(self, user_id=anon_user.user_id)
                # then we set this user is logged in
                self.is_authenticated = True
            else:
                log.debug('default user is NOT active')
                # in case of disabled anonymous user we reset some of the
                # parameters so such user is "corrupted", skipping the fill_data
                for attr in ['user_id', 'username', 'admin', 'active']:
                    setattr(self, attr, None)
                self.is_authenticated = False

        # guarantee username is always a string, even for corrupted users
        if not self.username:
            self.username = 'None'

        log.debug('AuthUser: propagated user is now %s', self)
1197 1199
    def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
                  calculate_super_admin=False, cache=None):
        """
        Fills user permission attribute with permissions taken from database
        works for permissions given for repositories, and for permissions that
        are granted to groups

        :param user: instance of User object from database
        :param scope: optional dict narrowing the calculation to particular
            repo/repo-group/user-group ids
        :param explicit: In case there are permissions both for user and a group
            that user is part of, explicit flag will define if user will
            explicitly override permissions from group, if it's False it will
            make decision based on the algo
        :param algo: algorithm to decide what permission should be choose if
            it's multiple defined, eg user in two different groups. It also
            decides if explicit flag is turned off how to specify the permission
            for case when user is in a group + have defined separate permission
        :param calculate_super_admin: calculate permissions for super-admin in the
            same way as for regular user without speedups
        :param cache: Use caching for calculation, None = let the cache backend decide
        """
        user_id = user.user_id
        user_is_admin = user.is_admin

        # inheritance of global permissions like create repo/fork repo etc
        user_inherit_default_permissions = user.inherit_default_permissions

        # TTL configured for the perms cache backend; 0/None disables caching
        cache_seconds = safe_int(
            rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time'))

        if cache is None:
            # let the backend cache decide
            cache_on = cache_seconds > 0
        else:
            cache_on = cache

        log.debug(
            'Computing PERMISSION tree for user %s scope `%s` '
            'with caching: %s[TTL: %ss]', user, scope, cache_on, cache_seconds or 0)

        # per-user cache namespace so invalidation can target a single user
        cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        # all inputs are passed as arguments so they become part of the
        # computed cache key
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
                                               condition=cache_on)
        def compute_perm_tree(cache_name, cache_ver,
                user_id, scope, user_is_admin,user_inherit_default_permissions,
                explicit, algo, calculate_super_admin):
            return _cached_perms_data(
                user_id, scope, user_is_admin, user_inherit_default_permissions,
                explicit, algo, calculate_super_admin)

        start = time.time()
        result = compute_perm_tree(
            'permissions', 'v1', user_id, scope, user_is_admin,
            user_inherit_default_permissions, explicit, algo,
            calculate_super_admin)

        # log only the per-section sizes, not the (potentially huge) tree
        result_repr = []
        for k in result:
            result_repr.append((k, len(result[k])))
        total = time.time() - start
        log.debug('PERMISSION tree for user %s computed in %.4fs: %s',
                  user, total, result_repr)

        return result
1263 1265
1264 1266 @property
1265 1267 def is_default(self):
1266 1268 return self.username == User.DEFAULT_USER
1267 1269
1268 1270 @property
1269 1271 def is_admin(self):
1270 1272 return self.admin
1271 1273
1272 1274 @property
1273 1275 def is_user_object(self):
1274 1276 return self.user_id is not None
1275 1277
1276 1278 @property
1277 1279 def repositories_admin(self):
1278 1280 """
1279 1281 Returns list of repositories you're an admin of
1280 1282 """
1281 1283 return [
1282 1284 x[0] for x in self.permissions['repositories'].items()
1283 1285 if x[1] == 'repository.admin']
1284 1286
1285 1287 @property
1286 1288 def repository_groups_admin(self):
1287 1289 """
1288 1290 Returns list of repository groups you're an admin of
1289 1291 """
1290 1292 return [
1291 1293 x[0] for x in self.permissions['repositories_groups'].items()
1292 1294 if x[1] == 'group.admin']
1293 1295
1294 1296 @property
1295 1297 def user_groups_admin(self):
1296 1298 """
1297 1299 Returns list of user groups you're an admin of
1298 1300 """
1299 1301 return [
1300 1302 x[0] for x in self.permissions['user_groups'].items()
1301 1303 if x[1] == 'usergroup.admin']
1302 1304
1303 1305 def repo_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1304 1306 if not perms:
1305 1307 perms = AuthUser.repo_read_perms
1306 1308 allowed_ids = []
1307 1309 for k, stack_data in self.permissions['repositories'].perm_origin_stack.items():
1308 1310 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1309 1311 if prefix_filter and not k.startswith(prefix_filter):
1310 1312 continue
1311 1313 if perm in perms:
1312 1314 allowed_ids.append(obj_id)
1313 1315 return allowed_ids
1314 1316
    def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
        """
        Returns list of repository ids that user have access to based on given
        perms. The cache flag should be only used in cases that are used for
        display purposes, NOT IN ANY CASE for permission checks.
        """
        from rhodecode.model.scm import RepoList
        if not perms:
            perms = AuthUser.repo_read_perms

        if not isinstance(perms, list):
            raise ValueError('perms parameter must be a list got {} instead'.format(perms))

        def _cached_repo_acl(perm_def, _name_filter):
            # optional SQL-side substring filter on repo name, then the
            # RepoList ACL filter by perm_def for this user
            qry = Repository.query()
            if _name_filter:
                ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
                qry = qry.filter(
                    Repository.repo_name.ilike(ilike_expression))

            return [x.repo_id for x in
                    RepoList(qry, perm_set=perm_def, extra_kwargs={'user': self})]

        log.debug('Computing REPO ACL IDS user %s', self)

        # per-user cache namespace so invalidation can target a single user
        cache_namespace_uid = 'cache_user_repo_acl_ids.{}'.format(self.user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        # args become part of the cache key; only cached when `cache` is truthy
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
        def compute_repo_acl_ids(cache_ver, user_id, perm_def, _name_filter):
            return _cached_repo_acl(perm_def, _name_filter)

        start = time.time()
        result = compute_repo_acl_ids('v1', self.user_id, perms, name_filter)
        total = time.time() - start
        log.debug('REPO ACL IDS for user %s computed in %.4fs', self, total)

        return result
1353 1355
1354 1356 def repo_group_acl_ids_from_stack(self, perms=None, prefix_filter=None, cache=False):
1355 1357 if not perms:
1356 1358 perms = AuthUser.repo_group_read_perms
1357 1359 allowed_ids = []
1358 1360 for k, stack_data in self.permissions['repositories_groups'].perm_origin_stack.items():
1359 1361 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1360 1362 if prefix_filter and not k.startswith(prefix_filter):
1361 1363 continue
1362 1364 if perm in perms:
1363 1365 allowed_ids.append(obj_id)
1364 1366 return allowed_ids
1365 1367
    def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
        """
        Returns list of repository group ids that user have access to based on given
        perms. The cache flag should be only used in cases that are used for
        display purposes, NOT IN ANY CASE for permission checks.
        """
        from rhodecode.model.scm import RepoGroupList
        if not perms:
            perms = AuthUser.repo_group_read_perms

        if not isinstance(perms, list):
            raise ValueError('perms parameter must be a list got {} instead'.format(perms))

        def _cached_repo_group_acl(perm_def, _name_filter):
            # optional SQL-side substring filter on group name, then the
            # RepoGroupList ACL filter by perm_def for this user
            qry = RepoGroup.query()
            if _name_filter:
                ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
                qry = qry.filter(
                    RepoGroup.group_name.ilike(ilike_expression))

            return [x.group_id for x in
                    RepoGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]

        log.debug('Computing REPO GROUP ACL IDS user %s', self)

        # per-user cache namespace so invalidation can target a single user
        cache_namespace_uid = 'cache_user_repo_group_acl_ids.{}'.format(self.user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        # args become part of the cache key; only cached when `cache` is truthy
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
        def compute_repo_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
            return _cached_repo_group_acl(perm_def, _name_filter)

        start = time.time()
        result = compute_repo_group_acl_ids('v1', self.user_id, perms, name_filter)
        total = time.time() - start
        log.debug('REPO GROUP ACL IDS for user %s computed in %.4fs', self, total)

        return result
1404 1406
1405 1407 def user_group_acl_ids_from_stack(self, perms=None, cache=False):
1406 1408 if not perms:
1407 1409 perms = AuthUser.user_group_read_perms
1408 1410 allowed_ids = []
1409 1411 for k, stack_data in self.permissions['user_groups'].perm_origin_stack.items():
1410 1412 perm, origin, obj_id = stack_data[-1] # last item is the current permission
1411 1413 if perm in perms:
1412 1414 allowed_ids.append(obj_id)
1413 1415 return allowed_ids
1414 1416
    def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
        """
        Returns list of user group ids that user have access to based on given
        perms. The cache flag should be only used in cases that are used for
        display purposes, NOT IN ANY CASE for permission checks.
        """
        from rhodecode.model.scm import UserGroupList
        if not perms:
            perms = AuthUser.user_group_read_perms

        if not isinstance(perms, list):
            raise ValueError('perms parameter must be a list got {} instead'.format(perms))

        def _cached_user_group_acl(perm_def, _name_filter):
            # optional SQL-side substring filter on group name, then the
            # UserGroupList ACL filter by perm_def for this user
            qry = UserGroup.query()
            if _name_filter:
                ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
                qry = qry.filter(
                    UserGroup.users_group_name.ilike(ilike_expression))

            return [x.users_group_id for x in
                    UserGroupList(qry, perm_set=perm_def, extra_kwargs={'user': self})]

        log.debug('Computing USER GROUP ACL IDS user %s', self)

        # per-user cache namespace so invalidation can target a single user
        cache_namespace_uid = 'cache_user_user_group_acl_ids.{}'.format(self.user_id)
        region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)

        # args become part of the cache key; only cached when `cache` is truthy
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid, condition=cache)
        def compute_user_group_acl_ids(cache_ver, user_id, perm_def, _name_filter):
            return _cached_user_group_acl(perm_def, _name_filter)

        start = time.time()
        result = compute_user_group_acl_ids('v1', self.user_id, perms, name_filter)
        total = time.time() - start
        log.debug('USER GROUP ACL IDS for user %s computed in %.4fs', self, total)

        return result
1453 1455
1454 1456 @property
1455 1457 def ip_allowed(self):
1456 1458 """
1457 1459 Checks if ip_addr used in constructor is allowed from defined list of
1458 1460 allowed ip_addresses for user
1459 1461
1460 1462 :returns: boolean, True if ip is in allowed ip range
1461 1463 """
1462 1464 # check IP
1463 1465 inherit = self.inherit_default_permissions
1464 1466 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
1465 1467 inherit_from_default=inherit)
1466 1468
1467 1469 @property
1468 1470 def personal_repo_group(self):
1469 1471 return RepoGroup.get_user_personal_repo_group(self.user_id)
1470 1472
1471 1473 @LazyProperty
1472 1474 def feed_token(self):
1473 1475 return self.get_instance().feed_token
1474 1476
1475 1477 @LazyProperty
1476 1478 def artifact_token(self):
1477 1479 return self.get_instance().artifact_token
1478 1480
1479 1481 @classmethod
1480 1482 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1481 1483 allowed_ips = AuthUser.get_allowed_ips(
1482 1484 user_id, cache=True, inherit_from_default=inherit_from_default)
1483 1485 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1484 1486 log.debug('IP:%s for user %s is in range of %s',
1485 1487 ip_addr, user_id, allowed_ips)
1486 1488 return True
1487 1489 else:
1488 1490 log.info('Access for IP:%s forbidden for user %s, '
1489 1491 'not in %s', ip_addr, user_id, allowed_ips)
1490 1492 return False
1491 1493
1492 1494 def get_branch_permissions(self, repo_name, perms=None):
1493 1495 perms = perms or self.permissions_with_scope({'repo_name': repo_name})
1494 1496 branch_perms = perms.get('repository_branches', {})
1495 1497 if not branch_perms:
1496 1498 return {}
1497 1499 repo_branch_perms = branch_perms.get(repo_name)
1498 1500 return repo_branch_perms or {}
1499 1501
1500 1502 def get_rule_and_branch_permission(self, repo_name, branch_name):
1501 1503 """
1502 1504 Check if this AuthUser has defined any permissions for branches. If any of
1503 1505 the rules match in order, we return the matching permissions
1504 1506 """
1505 1507
1506 1508 rule = default_perm = ''
1507 1509
1508 1510 repo_branch_perms = self.get_branch_permissions(repo_name=repo_name)
1509 1511 if not repo_branch_perms:
1510 1512 return rule, default_perm
1511 1513
1512 1514 # now calculate the permissions
1513 1515 for pattern, branch_perm in repo_branch_perms.items():
1514 1516 if fnmatch.fnmatch(branch_name, pattern):
1515 1517 rule = '`{}`=>{}'.format(pattern, branch_perm)
1516 1518 return rule, branch_perm
1517 1519
1518 1520 return rule, default_perm
1519 1521
1520 1522 def get_notice_messages(self):
1521 1523
1522 1524 notice_level = 'notice-error'
1523 1525 notice_messages = []
1524 1526 if self.is_default:
1525 1527 return [], notice_level
1526 1528
1527 1529 notices = UserNotice.query()\
1528 1530 .filter(UserNotice.user_id == self.user_id)\
1529 1531 .filter(UserNotice.notice_read == false())\
1530 1532 .all()
1531 1533
1532 1534 try:
1533 1535 for entry in notices:
1534 1536
1535 1537 msg = {
1536 1538 'msg_id': entry.user_notice_id,
1537 1539 'level': entry.notification_level,
1538 1540 'subject': entry.notice_subject,
1539 1541 'body': entry.notice_body,
1540 1542 }
1541 1543 notice_messages.append(msg)
1542 1544
1543 1545 log.debug('Got user %s %s messages', self, len(notice_messages))
1544 1546
1545 1547 levels = [x['level'] for x in notice_messages]
1546 1548 notice_level = 'notice-error' if 'error' in levels else 'notice-warning'
1547 1549 except Exception:
1548 1550 pass
1549 1551
1550 1552 return notice_messages, notice_level
1551 1553
1552 1554 def __repr__(self):
1553 1555 return self.repr_user(self.user_id, self.username, self.ip_addr, self.is_authenticated)
1554 1556
1555 1557 def set_authenticated(self, authenticated=True):
1556 1558 if self.user_id != self.anonymous_user.user_id:
1557 1559 self.is_authenticated = authenticated
1558 1560
1559 1561 def get_cookie_store(self):
1560 1562 return {
1561 1563 'username': self.username,
1562 1564 'password': md5(self.password or ''),
1563 1565 'user_id': self.user_id,
1564 1566 'is_authenticated': self.is_authenticated
1565 1567 }
1566 1568
1567 1569 @classmethod
1568 1570 def repr_user(cls, user_id=0, username='ANONYMOUS', ip='0.0.0.0', is_authenticated=False):
1569 1571 tmpl = "<AuthUser('id:{}[{}] ip:{} auth:{}')>"
1570 1572 return tmpl.format(user_id, username, ip, is_authenticated)
1571 1573
1572 1574 @classmethod
1573 1575 def from_cookie_store(cls, cookie_store):
1574 1576 """
1575 1577 Creates AuthUser from a cookie store
1576 1578
1577 1579 :param cls:
1578 1580 :param cookie_store:
1579 1581 """
1580 1582 user_id = cookie_store.get('user_id')
1581 1583 username = cookie_store.get('username')
1582 1584 api_key = cookie_store.get('api_key')
1583 1585 return AuthUser(user_id, api_key, username)
1584 1586
    @classmethod
    def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
        """
        Resolve the set of allowed IP addresses/ranges for `user_id`.

        :param user_id: id of the user to check; falsy skips per-user rules
        :param cache: use the short SQL cache for the lookups
        :param inherit_from_default: also include the default (anonymous)
            user's IP rules
        :returns: set of IP/CIDR strings; falls back to allow-all when no
            rules are defined at all
        """
        _set = set()

        if inherit_from_default:
            def_user_id = User.get_default_user(cache=True).user_id
            default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id)
            if cache:
                default_ips = default_ips.options(
                    FromCache("sql_cache_short", "get_user_ips_default"))

            # populate from default user
            for ip in default_ips:
                try:
                    _set.add(ip.ip_addr)
                except ObjectDeletedError:
                    # since we use heavy caching sometimes it happens that
                    # we get deleted objects here, we just skip them
                    pass

        # NOTE:(marcink) we don't want to load any rules for empty
        # user_id which is the case of access of non logged users when anonymous
        # access is disabled
        user_ips = []
        if user_id:
            user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
            if cache:
                user_ips = user_ips.options(
                    FromCache("sql_cache_short", "get_user_ips_%s" % user_id))

        for ip in user_ips:
            try:
                _set.add(ip.ip_addr)
            except ObjectDeletedError:
                # since we use heavy caching sometimes it happens that we get
                # deleted objects here, we just skip them
                pass
        # no rules at all means everything is allowed (IPv4 + IPv6 wildcards)
        return _set or {ip for ip in ['0.0.0.0/0', '::/0']}
1623 1625
1624 1626
def set_available_permissions(settings):
    """
    This function will propagate pyramid settings with all available defined
    permission given in db. We don't want to check each time from db for new
    permissions since adding a new permission also requires application restart
    ie. to decorate new views with the newly created permission

    :param settings: current pyramid registry.settings

    """
    log.debug('auth: getting information about all available permissions')
    try:
        session = meta.Session
        permission_names = [
            x.permission_name for x in session.query(Permission).all()]
        # setdefault keeps any value that was already configured
        settings.setdefault('available_permissions', permission_names)
        log.debug('auth: set available permissions')
    except Exception:
        log.exception('Failed to fetch permissions from the database.')
        raise
1645 1647
1646 1648
def get_csrf_token(session, force_new=False, save_if_missing=True):
    """
    Return the current authentication token, creating one if one doesn't
    already exist and the save_if_missing flag is present.

    :param session: pass in the pyramid session, else we use the global ones
    :param force_new: force to re-generate the token and store it in session
    :param save_if_missing: save the newly generated token if it's missing in
        session
    """
    # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
    # from pyramid.csrf import get_csrf_token

    token_missing = csrf_token_key not in session
    if force_new or (token_missing and save_if_missing):
        token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
        session[csrf_token_key] = token
        if hasattr(session, 'save'):
            session.save()
    return session.get(csrf_token_key)
1666 1668
1667 1669
def get_request(perm_class_instance):
    """Return the current thread-local pyramid request object."""
    from pyramid.threadlocal import get_current_request
    return get_current_request()
1672 1674
1673 1675
1674 1676 # CHECK DECORATORS
class CSRFRequired(object):
    """
    Decorator for authenticating a form

    This decorator uses an authorization token stored in the client's
    session for prevention of certain Cross-site request forgery (CSRF)
    attacks (See
    http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
    information).

    For use with the ``secure_form`` helper functions.

    """
    def __init__(self, token=csrf_token_key, header='X-CSRF-Token', except_methods=None):
        """
        :param token: name of the POST parameter carrying the CSRF token
        :param header: fallback request header carrying the token
        :param except_methods: HTTP methods excluded from the check
        """
        self.token = token
        self.header = header
        self.except_methods = except_methods or []

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_csrf(self, _request):
        # POST parameter wins, the header is the fallback
        return _request.POST.get(self.token, _request.headers.get(self.header))

    def check_csrf(self, _request, cur_token):
        supplied_token = self._get_csrf(_request)
        return supplied_token and supplied_token == cur_token

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        request = self._get_request()

        if request.method in self.except_methods:
            return func(*fargs, **fkwargs)

        cur_token = get_csrf_token(request.session, save_if_missing=False)
        if self.check_csrf(request, cur_token):
            # remove the token from POST so handlers don't see it
            if request.POST.get(self.token):
                del request.POST[self.token]
            return func(*fargs, **fkwargs)
        else:
            reason = 'token-missing'
            supplied_token = self._get_csrf(request)
            if supplied_token and cur_token != supplied_token:
                # log only the first 6 chars of each token; the previous
                # code had a precedence bug (`cur_token or ''[:6]`) which
                # sliced the empty string and logged the full tokens
                reason = 'token-mismatch [%s:%s]' % (
                    (cur_token or '')[:6], (supplied_token or '')[:6])

            csrf_message = \
                ("Cross-site request forgery detected, request denied. See "
                 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
                 "more information.")
            # log.warning: log.warn is a deprecated alias
            log.warning('Cross-site request forgery detected, request %r DENIED: %s '
                        'REMOTE_ADDR:%s, HEADERS:%s' % (
                            request, reason, request.remote_addr, request.headers))

            raise HTTPForbidden(explanation=csrf_message)
1733 1735
1734 1736
class LoginRequired(object):
    """
    Must be logged in to execute this function else
    redirect to login page

    :param auth_token_access: if enabled this checks only for valid auth token
        and grants access based on valid token
    """
    def __init__(self, auth_token_access=None):
        self.auth_token_access = auth_token_access
        if self.auth_token_access:
            valid_type = set(auth_token_access).intersection(set(UserApiKeys.ROLES))
            if not valid_type:
                # NOTE: fixed error message typo ('on of' -> 'one of')
                raise ValueError('auth_token_access must be one of {}, got {}'.format(
                    UserApiKeys.ROLES, auth_token_access))

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        from rhodecode.lib import helpers as h
        cls = fargs[0]
        user = cls._rhodecode_user
        request = self._get_request()
        _ = request.translate

        loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
        log.debug('Starting login restriction checks for user: %s', user)
        # check if our IP is allowed
        ip_access_valid = True
        if not user.ip_allowed:
            h.flash(h.literal(_('IP {} not allowed'.format(user.ip_addr))),
                    category='warning')
            ip_access_valid = False

        # we used stored token that is extract from GET or URL param (if any)
        _auth_token = request.user_auth_token

        # check if we used an AUTH_TOKEN and it's a valid one
        # defined white-list of controllers which API access will be enabled
        whitelist = None
        if self.auth_token_access:
            # since this location is allowed by @LoginRequired decorator it's our
            # only whitelist
            whitelist = [loc]
        auth_token_access_valid = allowed_auth_token_access(
            loc, whitelist=whitelist, auth_token=_auth_token)

        # explicit controller is enabled or API is in our whitelist
        if auth_token_access_valid:
            log.debug('Checking AUTH TOKEN access for %s', cls)
            db_user = user.get_instance()

            if db_user:
                if self.auth_token_access:
                    roles = self.auth_token_access
                else:
                    roles = [UserApiKeys.ROLE_HTTP]
                log.debug('AUTH TOKEN: checking auth for user %s and roles %s',
                          db_user, roles)
                token_match = db_user.authenticate_by_token(
                    _auth_token, roles=roles)
            else:
                log.debug('Unable to fetch db instance for auth user: %s', user)
                token_match = False

            if _auth_token and token_match:
                auth_token_access_valid = True
                log.debug('AUTH TOKEN ****%s is VALID', _auth_token[-4:])
            else:
                auth_token_access_valid = False
                if not _auth_token:
                    log.debug("AUTH TOKEN *NOT* present in request")
                else:
                    log.warning("AUTH TOKEN ****%s *NOT* valid", _auth_token[-4:])

        log.debug('Checking if %s is authenticated @ %s', user.username, loc)
        reason = 'RHODECODE_AUTH' if user.is_authenticated \
            else 'AUTH_TOKEN_AUTH'

        if ip_access_valid and (
                user.is_authenticated or auth_token_access_valid):
            log.info('user %s authenticating with:%s IS authenticated on func %s',
                     user, reason, loc)

            return func(*fargs, **fkwargs)
        else:
            log.warning(
                'user %s authenticating with:%s NOT authenticated on '
                'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s',
                user, reason, loc, ip_access_valid, auth_token_access_valid)
            # we preserve the get PARAM
            came_from = get_came_from(request)

            log.debug('redirecting to login page with %s', came_from)
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))
1835 1837
1836 1838
class NotAnonymous(object):
    """
    Must be logged in to execute this function else
    redirect to login page
    """

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h
        cls = fargs[0]
        self.user = cls._rhodecode_user
        request = self._get_request()
        _ = request.translate
        log.debug('Checking if user is not anonymous @%s', cls)

        if self.user.username == User.DEFAULT_USER:
            # anonymous user: bounce to the login page, preserving the
            # originally requested location
            came_from = get_came_from(request)
            h.flash(_('You need to be a registered user to '
                      'perform this action'),
                    category='warning')
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))

        return func(*fargs, **fkwargs)
1868 1870
1869 1871
class PermsDecorator(object):
    """
    Base class for view-permission decorators. The current user is taken
    from the decorated view class, which stores it in the base controllers.

    Subclasses implement :meth:`check_permissions`; on denial, anonymous
    users are redirected to login, while authenticated users receive a 404.
    """

    def __init__(self, *required_perms):
        self.required_perms = set(required_perms)

    def __call__(self, func):
        return get_cython_compat_decorator(self.__wrapper, func)

    def _get_request(self):
        return get_request(self)

    def __wrapper(self, func, *fargs, **fkwargs):
        import rhodecode.lib.helpers as h

        view_cls = fargs[0]
        _user = view_cls._rhodecode_user
        request = self._get_request()
        _ = request.translate

        log.debug('checking %s permissions %s for %s %s',
                  self.__class__.__name__, self.required_perms, view_cls,
                  _user)

        if self.check_permissions(_user):
            log.debug('Permission granted for %s %s', view_cls, _user)
            return func(*fargs, **fkwargs)

        log.debug('Permission denied for %s %s', view_cls, _user)
        if _user.username == User.DEFAULT_USER:
            # anonymous users get a chance to sign in first
            came_from = get_came_from(self._get_request())
            h.flash(_('You need to be signed in to view this page'),
                    category='warning')
            raise HTTPFound(
                h.route_path('login', _query={'came_from': came_from}))

        # authenticated but unauthorized: answer with 404 instead of 403
        # to prevent resource discovery
        raise HTTPNotFound()

    def check_permissions(self, user):
        """Dummy function for overriding"""
        raise NotImplementedError(
            'You have to write this function in child class')
1918 1920
1919 1921
class HasPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates. All of them
    have to be met in order to fulfill the request
    """

    def check_permissions(self, user):
        # grant only when every required permission is present globally
        global_perms = user.permissions_with_scope({})['global']
        return self.required_perms.issubset(global_perms)
1931 1933
1932 1934
class HasPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates. In order to
    fulfill the request any of predicates must be met
    """

    def check_permissions(self, user):
        # grant when at least one required permission is present globally
        global_perms = user.permissions_with_scope({})['global']
        return bool(self.required_perms.intersection(global_perms))
1944 1946
1945 1947
class HasRepoPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    repository. All of them have to be met in order to fulfill the request
    """

    def _get_repo_name(self):
        # repo name is taken from the current request
        return get_repo_slug(self._get_request())

    def check_permissions(self, user):
        repo_name = self._get_repo_name()
        try:
            user_perms = {user.permissions['repositories'][repo_name]}
        except KeyError:
            log.debug('cannot locate repo with name: `%s` in permissions defs',
                      repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  user_perms, repo_name)
        return self.required_perms.issubset(user_perms)
1971 1973
1972 1974
class HasRepoPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    repository. In order to fulfill the request any of predicates must be met
    """

    def _get_repo_name(self):
        # repo name is taken from the current request
        return get_repo_slug(self._get_request())

    def check_permissions(self, user):
        repo_name = self._get_repo_name()
        try:
            user_perms = {user.permissions['repositories'][repo_name]}
        except KeyError:
            log.debug(
                'cannot locate repo with name: `%s` in permissions defs',
                repo_name)
            return False

        log.debug('checking `%s` permissions for repo `%s`',
                  user_perms, repo_name)
        return bool(self.required_perms.intersection(user_perms))
1999 2001
2000 2002
class HasRepoGroupPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    repository group. All of them have to be met in order to
    fulfill the request
    """

    def _get_repo_group_name(self):
        # repo group name is taken from the current request
        return get_repo_group_slug(self._get_request())

    def check_permissions(self, user):
        group_name = self._get_repo_group_name()
        try:
            user_perms = {user.permissions['repositories_groups'][group_name]}
        except KeyError:
            log.debug(
                'cannot locate repo group with name: `%s` in permissions defs',
                group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  user_perms, group_name)
        return self.required_perms.issubset(user_perms)
2027 2029
2028 2030
class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    repository group. In order to fulfill the request any
    of predicates must be met
    """

    def _get_repo_group_name(self):
        # repo group name is taken from the current request
        return get_repo_group_slug(self._get_request())

    def check_permissions(self, user):
        group_name = self._get_repo_group_name()
        try:
            user_perms = {user.permissions['repositories_groups'][group_name]}
        except KeyError:
            log.debug(
                'cannot locate repo group with name: `%s` in permissions defs',
                group_name)
            return False

        log.debug('checking `%s` permissions for repo group `%s`',
                  user_perms, group_name)
        return bool(self.required_perms.intersection(user_perms))
2056 2058
2057 2059
class HasUserGroupPermissionAllDecorator(PermsDecorator):
    """
    Checks for access permission for all given predicates for specific
    user group. All of them have to be met in order to fulfill the request
    """

    def _get_user_group_name(self):
        # user group name is taken from the current request
        _request = self._get_request()
        return get_user_group_slug(_request)

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            user_perms = {perms['user_groups'][group_name]}
        except KeyError:
            # consistency fix: log the lookup miss like the repo and
            # repo-group decorators do, instead of failing silently
            log.debug(
                'cannot locate user group with name: `%s` in permissions defs',
                group_name)
            return False

        if self.required_perms.issubset(user_perms):
            return True
        return False
2078 2080
2079 2081
class HasUserGroupPermissionAnyDecorator(PermsDecorator):
    """
    Checks for access permission for any of given predicates for specific
    user group. In order to fulfill the request any of predicates must be met
    """

    def _get_user_group_name(self):
        # user group name is taken from the current request
        _request = self._get_request()
        return get_user_group_slug(_request)

    def check_permissions(self, user):
        perms = user.permissions
        group_name = self._get_user_group_name()
        try:
            user_perms = {perms['user_groups'][group_name]}
        except KeyError:
            # consistency fix: log the lookup miss like the repo and
            # repo-group decorators do, instead of failing silently
            log.debug(
                'cannot locate user group with name: `%s` in permissions defs',
                group_name)
            return False

        if self.required_perms.intersection(user_perms):
            return True
        return False
2100 2102
2101 2103
# CHECK FUNCTIONS
class PermsFunction(object):
    """
    Base class for permission-check functions usable from code/templates.

    Instances must be *called* to perform the check; truth-testing an
    instance directly is a usage error and always evaluates to False.
    Subclasses implement :meth:`check_permissions`.
    """

    def __init__(self, *perms):
        self.required_perms = set(perms)
        # scope attributes, set by subclass __call__ implementations
        self.repo_name = None
        self.repo_group_name = None
        self.user_group_name = None

    def __bool__(self):
        import inspect
        frame = inspect.currentframe()
        stack_trace = traceback.format_stack(frame)
        log.error('Checking bool value on a class instance of perm '
                  'function is not allowed: %s', ''.join(stack_trace))
        # rather than throwing errors, here we always return False so if by
        # accident someone checks truth for just an instance it will always end
        # up in returning False
        return False
    __nonzero__ = __bool__

    def __call__(self, check_location='', user=None):
        """
        Run the permission check for `user` (defaults to the user on the
        current request). Returns True when permission is granted.
        """
        if not user:
            log.debug('Using user attribute from global request')
            request = self._get_request()
            user = request.user

        # init auth user if not already given
        if not isinstance(user, AuthUser):
            log.debug('Wrapping user %s into AuthUser', user)
            user = AuthUser(user.user_id)

        cls_name = self.__class__.__name__
        check_scope = self._get_check_scope(cls_name)
        check_location = check_location or 'unspecified location'

        log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
                  self.required_perms, user, check_scope, check_location)
        if not user:
            log.warning('Empty user given for permission check')
            return False

        if self.check_permissions(user):
            log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return True

        else:
            log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return False

    def _get_request(self):
        return get_request(self)

    def _get_check_scope(self, cls_name):
        # human-readable scope label for log messages
        return {
            'HasPermissionAll': 'GLOBAL',
            'HasPermissionAny': 'GLOBAL',
            'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
            'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
            'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
            'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
            'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
            'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
        }.get(cls_name, '?:%s' % cls_name)

    def check_permissions(self, user):
        """Dummy function for overriding"""
        # consistency fix: raise NotImplementedError like PermsDecorator
        # does, instead of a bare Exception
        raise NotImplementedError(
            'You have to write this function in child class')
2173 2175
2174 2176
class HasPermissionAll(PermsFunction):
    def check_permissions(self, user):
        """Grant only when *all* required global permissions are present."""
        perms = user.permissions_with_scope({})
        # default to an empty set: a missing 'global' key would otherwise
        # yield None and make issubset() raise TypeError
        if self.required_perms.issubset(perms.get('global', set())):
            return True
        return False
2181 2183
2182 2184
class HasPermissionAny(PermsFunction):
    def check_permissions(self, user):
        """Grant when *any* required global permission is present."""
        perms = user.permissions_with_scope({})
        # default to an empty set: a missing 'global' key would otherwise
        # yield None and make intersection() raise TypeError
        if self.required_perms.intersection(perms.get('global', set())):
            return True
        return False
2189 2191
2190 2192
class HasRepoPermissionAll(PermsFunction):
    """Check that a user holds *all* required permissions on a repository."""

    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAll, self).__call__(check_location, user)

    def _get_repo_name(self):
        if not self.repo_name:
            # fall back to the repo name taken from the current request
            self.repo_name = get_repo_slug(self._get_request())
        return self.repo_name

    def check_permissions(self, user):
        self.repo_name = self._get_repo_name()
        try:
            user_perms = {user.permissions['repositories'][self.repo_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(user_perms)
2212 2214
2213 2215
class HasRepoPermissionAny(PermsFunction):
    """Check that a user holds *any* required permission on a repository."""

    def __call__(self, repo_name=None, check_location='', user=None):
        self.repo_name = repo_name
        return super(HasRepoPermissionAny, self).__call__(check_location, user)

    def _get_repo_name(self):
        if not self.repo_name:
            # fall back to the repo name taken from the current request
            self.repo_name = get_repo_slug(self._get_request())
        return self.repo_name

    def check_permissions(self, user):
        self.repo_name = self._get_repo_name()
        try:
            user_perms = {user.permissions['repositories'][self.repo_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(user_perms))
2235 2237
2236 2238
class HasRepoGroupPermissionAny(PermsFunction):
    """Check that a user holds *any* required permission on a repo group."""

    def __call__(self, group_name=None, check_location='', user=None):
        self.repo_group_name = group_name
        return super(HasRepoGroupPermissionAny, self).__call__(check_location, user)

    def check_permissions(self, user):
        try:
            user_perms = {
                user.permissions['repositories_groups'][self.repo_group_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(user_perms))
2251 2253
2252 2254
class HasRepoGroupPermissionAll(PermsFunction):
    """Check that a user holds *all* required permissions on a repo group."""

    def __call__(self, group_name=None, check_location='', user=None):
        self.repo_group_name = group_name
        return super(HasRepoGroupPermissionAll, self).__call__(check_location, user)

    def check_permissions(self, user):
        try:
            user_perms = {
                user.permissions['repositories_groups'][self.repo_group_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(user_perms)
2267 2269
2268 2270
class HasUserGroupPermissionAny(PermsFunction):
    """Check that a user holds *any* required permission on a user group."""

    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAny, self).__call__(check_location, user)

    def check_permissions(self, user):
        try:
            user_perms = {user.permissions['user_groups'][self.user_group_name]}
        except KeyError:
            return False
        return bool(self.required_perms.intersection(user_perms))
2283 2285
2284 2286
class HasUserGroupPermissionAll(PermsFunction):
    """Check that a user holds *all* required permissions on a user group."""

    def __call__(self, user_group_name=None, check_location='', user=None):
        self.user_group_name = user_group_name
        return super(HasUserGroupPermissionAll, self).__call__(check_location, user)

    def check_permissions(self, user):
        try:
            user_perms = {user.permissions['user_groups'][self.user_group_name]}
        except KeyError:
            return False
        return self.required_perms.issubset(user_perms)
2299 2301
2300 2302
# SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
class HasPermissionAnyMiddleware(object):
    """Permission check used by the VCS protocol (middleware) layer."""

    def __init__(self, *perms):
        self.required_perms = set(perms)

    def __call__(self, auth_user, repo_name):
        # repo_name MUST be unicode, since we handle keys in permission
        # dict by unicode
        repo_name = safe_unicode(repo_name)
        log.debug(
            'Checking VCS protocol permissions %s for user:%s repo:`%s`',
            self.required_perms, auth_user, repo_name)

        if self.check_permissions(auth_user, repo_name):
            log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
                      repo_name, auth_user, 'PermissionMiddleware')
            return True

        log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
                  repo_name, auth_user, 'PermissionMiddleware')
        return False

    def check_permissions(self, user, repo_name):
        perms = user.permissions_with_scope({'repo_name': repo_name})
        try:
            user_perms = {perms['repositories'][repo_name]}
        except Exception:
            # permission structure lookup failed; deny rather than crash
            log.exception('Error while accessing user permissions')
            return False
        return bool(self.required_perms.intersection(user_perms))
2336 2338
2337 2339
# SPECIAL VERSION TO HANDLE API AUTH
class _BaseApiPerm(object):
    """Base class for permission checks guarding API calls."""

    def __init__(self, *perms):
        self.required_perms = set(perms)

    def __call__(self, check_location=None, user=None, repo_name=None,
                 group_name=None, user_group_name=None):
        cls_name = self.__class__.__name__

        # build a readable scope description for logging
        check_scope = 'global:%s' % (self.required_perms,)
        if repo_name:
            check_scope += ', repo_name:%s' % (repo_name,)
        if group_name:
            check_scope += ', repo_group_name:%s' % (group_name,)
        if user_group_name:
            check_scope += ', user_group_name:%s' % (user_group_name,)

        log.debug('checking cls:%s %s %s @ %s',
                  cls_name, self.required_perms, check_scope, check_location)
        if not user:
            log.debug('Empty User passed into arguments')
            return False

        # process user
        if not isinstance(user, AuthUser):
            user = AuthUser(user.user_id)
        if not check_location:
            check_location = 'unspecified'

        if self.check_permissions(user.permissions, repo_name, group_name,
                                  user_group_name):
            log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
                      check_scope, user, check_location)
            return True

        log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
                  check_scope, user, check_location)
        return False

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        """
        implement in child class should return True if permissions are ok,
        False otherwise

        :param perm_defs: dict with permission definitions
        :param repo_name: repo name
        """
        raise NotImplementedError()
2388 2390
2389 2391
class HasPermissionAllApi(_BaseApiPerm):
    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        """Grant only when *all* required global permissions are present."""
        # default to an empty set: a missing 'global' key would otherwise
        # yield None and make issubset() raise TypeError
        if self.required_perms.issubset(perm_defs.get('global', set())):
            return True
        return False
2396 2398
2397 2399
class HasPermissionAnyApi(_BaseApiPerm):
    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        """Grant when *any* required global permission is present."""
        # default to an empty set: a missing 'global' key would otherwise
        # yield None and make intersection() raise TypeError
        if self.required_perms.intersection(perm_defs.get('global', set())):
            return True
        return False
2404 2406
2405 2407
class HasRepoPermissionAllApi(_BaseApiPerm):
    """API check: all required permissions on the given repository."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            _user_perms = {perm_defs['repositories'][repo_name]}
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return self.required_perms.issubset(_user_perms)
2417 2419
2418 2420
class HasRepoPermissionAnyApi(_BaseApiPerm):
    """API check: any required permission on the given repository."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            _user_perms = {perm_defs['repositories'][repo_name]}
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms.intersection(_user_perms))
2430 2432
2431 2433
class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
    """API check: any required permission on the given repository group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            _user_perms = {perm_defs['repositories_groups'][group_name]}
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms.intersection(_user_perms))
2443 2445
2444 2446
class HasRepoGroupPermissionAllApi(_BaseApiPerm):
    """API check: all required permissions on the given repository group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            _user_perms = {perm_defs['repositories_groups'][group_name]}
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return self.required_perms.issubset(_user_perms)
2456 2458
2457 2459
class HasUserGroupPermissionAnyApi(_BaseApiPerm):
    """API check: any required permission on the given user group."""

    def check_permissions(self, perm_defs, repo_name=None, group_name=None,
                          user_group_name=None):
        try:
            _user_perms = {perm_defs['user_groups'][user_group_name]}
        except KeyError:
            log.warning(traceback.format_exc())
            return False
        return bool(self.required_perms.intersection(_user_perms))
2469 2471
2470 2472
def check_ip_access(source_ip, allowed_ips=None):
    """
    Checks if source_ip is a subnet of any of allowed_ips.

    :param source_ip: IP address of the incoming request
    :param allowed_ips: list of allowed ips together with mask
    """
    log.debug('checking if ip:%s is subnet of %s', source_ip, allowed_ips)
    source_ip_address = ipaddress.ip_address(safe_unicode(source_ip))

    if not isinstance(allowed_ips, (tuple, list, set)):
        # nothing sensible to match against
        return False

    for ip in allowed_ips:
        ip = safe_unicode(ip)
        try:
            network_address = ipaddress.ip_network(ip, strict=False)
            if source_ip_address in network_address:
                log.debug('IP %s is network %s', source_ip_address, network_address)
                return True
        # for any case we cannot determine the IP, don't crash just
        # skip it and log as error, we want to say forbidden still when
        # sending bad IP
        except Exception:
            log.error(traceback.format_exc())
            continue
    return False
2495 2497
2496 2498
def get_cython_compat_decorator(wrapper, func):
    """
    Creates a cython compatible decorator. The previously used
    decorator.decorator() function seems to be incompatible with cython.

    :param wrapper: __wrapper method of the decorator class
    :param func: decorated function
    """
    def local_wrapper(*args, **kwds):
        # hand control to the decorator's wrapper, with the decorated
        # function as the first argument followed by the call arguments
        return wrapper(func, *args, **kwds)

    # copy name/doc/etc. from the decorated function onto the wrapper
    local_wrapper = wraps(func)(local_wrapper)
    local_wrapper.__wrapped__ = func
    return local_wrapper
2510 2512
2511 2513
@@ -1,2117 +1,2148 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helper functions
23 23
24 24 Consists of functions to typically be used within templates, but also
25 25 available to Controllers. This module is available to both as 'h'.
26 26 """
27 27 import base64
28 import collections
28 29
29 30 import os
30 31 import random
31 32 import hashlib
32 33 import StringIO
33 34 import textwrap
34 35 import urllib
35 36 import math
36 37 import logging
37 38 import re
38 39 import time
39 40 import string
40 41 import hashlib
41 42 import regex
42 43 from collections import OrderedDict
43 44
44 45 import pygments
45 46 import itertools
46 47 import fnmatch
47 48 import bleach
48 49
49 50 from pyramid import compat
50 51 from datetime import datetime
51 52 from functools import partial
52 53 from pygments.formatters.html import HtmlFormatter
53 54 from pygments.lexers import (
54 55 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
55 56
56 57 from pyramid.threadlocal import get_current_request
57 58 from tempita import looper
58 59 from webhelpers2.html import literal, HTML, escape
59 60 from webhelpers2.html._autolink import _auto_link_urls
60 61 from webhelpers2.html.tools import (
61 62 button_to, highlight, js_obfuscate, strip_links, strip_tags)
62 63
63 64 from webhelpers2.text import (
64 65 chop_at, collapse, convert_accented_entities,
65 66 convert_misc_entities, lchop, plural, rchop, remove_formatting,
66 67 replace_whitespace, urlify, truncate, wrap_paragraphs)
67 68 from webhelpers2.date import time_ago_in_words
68 69
69 70 from webhelpers2.html.tags import (
70 71 _input, NotGiven, _make_safe_id_component as safeid,
71 72 form as insecure_form,
72 73 auto_discovery_link, checkbox, end_form, file,
73 74 hidden, image, javascript_link, link_to, link_to_if, link_to_unless, ol,
74 75 select as raw_select, stylesheet_link, submit, text, password, textarea,
75 76 ul, radio, Options)
76 77
77 78 from webhelpers2.number import format_byte_size
78 79
79 80 from rhodecode.lib.action_parser import action_parser
80 81 from rhodecode.lib.pagination import Page, RepoPage, SqlPage
81 82 from rhodecode.lib.ext_json import json
82 83 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
83 84 from rhodecode.lib.utils2 import (
84 85 str2bool, safe_unicode, safe_str,
85 86 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime,
86 87 AttributeDict, safe_int, md5, md5_safe, get_host_info)
87 88 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
88 89 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
89 90 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
90 91 from rhodecode.lib.vcs.conf.settings import ARCHIVE_SPECS
91 92 from rhodecode.lib.index.search_utils import get_matching_line_offsets
92 93 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
93 94 from rhodecode.model.changeset_status import ChangesetStatusModel
94 95 from rhodecode.model.db import Permission, User, Repository, UserApiKeys, FileStore
95 96 from rhodecode.model.repo_group import RepoGroupModel
96 97 from rhodecode.model.settings import IssueTrackerSettingsModel
97 98
98 99
99 100 log = logging.getLogger(__name__)
100 101
101 102
# module-level aliases for the anonymous/default user identity
DEFAULT_USER = User.DEFAULT_USER
DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
104 105
105 106
def asset(path, ver=None, **kwargs):
    """
    Helper to generate a static asset file path for rhodecode assets

    eg. h.asset('images/image.png', ver='3923')

    :param path: path of asset
    :param ver: optional version query param to append as ?ver=
    """
    request = get_current_request()
    query = {}
    query.update(kwargs)
    if ver:
        # bugfix: merge `ver` into the query instead of replacing the whole
        # dict, so extra keyword query params are not silently dropped
        query['ver'] = ver
    return request.static_path(
        'rhodecode:public/{}'.format(path), _query=query)
122 123
123 124
# translation table for unicode .translate(): maps character ordinals to
# their HTML entity replacement strings
default_html_escape_table = {
    ord('&'): u'&amp;',
    ord('<'): u'&lt;',
    ord('>'): u'&gt;',
    ord('"'): u'&quot;',
    ord("'"): u'&#39;',
}


def html_escape(text, html_escape_table=default_html_escape_table):
    """Produce entities within text.

    Escapes the standard HTML special characters in a single pass via the
    translate protocol; `text` should be unicode for the dict-based table
    to apply.
    """
    return text.translate(html_escape_table)
136 137
137 138
def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
    """
    Truncate string ``s`` at the first occurrence of ``sub``.

    If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
    A ``suffix_if_chopped`` marker is appended only when non-empty text was
    actually cut off.
    """
    cut_at = s.find(sub)
    if cut_at == -1:
        # marker not present: return the string untouched
        return s

    if inclusive:
        cut_at += len(sub)

    head, tail = s[:cut_at], s[cut_at:]
    if suffix_if_chopped and tail.strip():
        head += suffix_if_chopped
    return head
159 160
160 161
def shorter(text, size=20, prefix=False):
    """
    Truncate ``text`` to at most ``size`` characters, marking the cut with
    an ellipsis. With ``prefix`` set, the tail of the string is kept and
    the ellipsis goes in front.
    """
    ellipsis = '...'
    if len(text) <= size:
        return text

    keep = size - len(ellipsis)
    if prefix:
        # shorten in front
        return ellipsis + text[-keep:]
    return text[:keep] + ellipsis
170 171
171 172
def reset(name, value=None, id=NotGiven, type="reset", **attrs):
    """
    Reset button

    Thin wrapper around the webhelpers2 ``_input`` builder with the input
    type preset to "reset".
    """
    return _input(type, name, value, id, attrs)
177 178
178 179
def select(name, selected_values, options, id=NotGiven, **attrs):
    # Backwards-compatible wrapper around webhelpers2 raw_select() that also
    # accepts legacy (value, label) tuples and bare strings; a list/tuple
    # *value* inside a tuple is rendered as an <optgroup>.

    if isinstance(options, (list, tuple)):
        options_iter = options
        # Handle old value,label lists ... where value also can be value,label lists
        options = Options()
        for opt in options_iter:
            if isinstance(opt, tuple) and len(opt) == 2:
                value, label = opt
            elif isinstance(opt, basestring):
                # a bare string serves as both value and label
                value = label = opt
            else:
                raise ValueError('invalid select option type %r' % type(opt))

            if isinstance(value, (list, tuple)):
                # nested list/tuple value -> an <optgroup> with its own options
                option_group = options.add_optgroup(label)
                for opt2 in value:
                    if isinstance(opt2, tuple) and len(opt2) == 2:
                        group_value, group_label = opt2
                    elif isinstance(opt2, basestring):
                        group_value = group_label = opt2
                    else:
                        raise ValueError('invalid select option type %r' % type(opt2))

                    option_group.add_option(group_label, group_value)
            else:
                options.add_option(label, value)

    return raw_select(name, selected_values, options, id=id, **attrs)
208 209
209 210
def branding(name, length=40):
    # truncate the branding text to `length` chars without an "..." indicator
    return truncate(name, length, indicator="")
212 213
213 214
def FID(raw_id, path):
    """
    Creates a unique, url-safe ID for a filenode from the shortened
    commit id plus a hash of the file path.

    :param raw_id: full commit id
    :param path: file path inside the repository
    """
    path_hash = md5_safe(path)[:12]
    return 'c-%s-%s' % (short_id(raw_id), path_hash)
224 225
225 226
class _GetError(object):
    """
    Fetch the error for a field from ``form_errors`` and render it as a
    span-wrapped error message; returns None when the field has no error.

    :param field_name: field to fetch errors for
    :param form_errors: form errors dict
    """

    def __call__(self, field_name, form_errors):
        if not form_errors or field_name not in form_errors:
            return None
        tmpl = """<span class="error_msg">%s</span>"""
        return literal(tmpl % form_errors.get(field_name))


get_error = _GetError()
241 242
242 243
class _ToolTip(object):

    def __call__(self, tooltip_title, trim_at=50):
        """
        Escape ``tooltip_title`` so it is safe to embed as tooltip text.

        :param tooltip_title: raw tooltip text
        :param trim_at: kept for backward compatibility; not used here
        """
        # escape() first, then an explicit angle-bracket replacement; the
        # second pass is defensive and a no-op for already-escaped text
        escaped = escape(tooltip_title)
        escaped = escaped.replace('<', '&lt;').replace('>', '&gt;')
        return escaped


tooltip = _ToolTip()
258 259
# clipboard-copy icon appended after the files breadcrumbs; `{}` receives
# the (escaped) full file path used as the copy payload
files_icon = u'<i class="file-breadcrumb-copy tooltip icon-clipboard clipboard-action" data-clipboard-text="{}" title="Copy file path"></i>'
260 261
261 262
def files_breadcrumbs(repo_name, repo_type, commit_id, file_path, landing_ref_name=None, at_ref=None,
                      limit_items=False, linkify_last_item=False, hide_last_item=False,
                      copy_path_icon=True):
    """
    Render the breadcrumb trail for the repo files view: a HOME icon
    followed by one segment per path component, each linked to its
    sub-tree URL.

    :param limit_items: collapse long trails to HOME + '...' + last 5 items
    :param linkify_last_item: when False, the final segment is plain text
    :param hide_last_item: drop the final path segment entirely
    :param copy_path_icon: append a copy-to-clipboard icon for the path
    """
    if isinstance(file_path, str):
        file_path = safe_unicode(file_path)

    # only carry the ?at=<ref> query (and prefer the ref as landing point)
    # when an explicit ref was requested
    if at_ref:
        route_qry = {'at': at_ref}
        default_landing_ref = at_ref or landing_ref_name or commit_id
    else:
        route_qry = None
        default_landing_ref = commit_id

    # first segment is a `HOME` link to repo files root location
    root_name = literal(u'<i class="icon-home"></i>')

    url_segments = [
        link_to(
            root_name,
            repo_files_by_ref_url(
                repo_name,
                repo_type,
                f_path=None,  # None here is a special case for SVN repos,
                # that won't prefix with a ref
                ref_name=default_landing_ref,
                commit_id=commit_id,
                query=route_qry
            )
        )]

    path_segments = file_path.split('/')
    last_cnt = len(path_segments) - 1
    for cnt, segment in enumerate(path_segments):
        if not segment:
            continue
        segment_html = escape(segment)

        last_item = cnt == last_cnt

        if last_item and hide_last_item:
            # iterate over and hide last element
            continue

        if last_item and linkify_last_item is False:
            # plain version
            url_segments.append(segment_html)
        else:
            url_segments.append(
                link_to(
                    segment_html,
                    repo_files_by_ref_url(
                        repo_name,
                        repo_type,
                        f_path='/'.join(path_segments[:cnt + 1]),
                        ref_name=default_landing_ref,
                        commit_id=commit_id,
                        query=route_qry
                    ),
                ))

    # collapse to HOME + ellipsis + last 5 items, but only when the
    # collapsed form is actually shorter than the full trail
    limited_url_segments = url_segments[:1] + ['...'] + url_segments[-5:]
    if limit_items and len(limited_url_segments) < len(url_segments):
        url_segments = limited_url_segments

    full_path = file_path
    if copy_path_icon:
        icon = files_icon.format(escape(full_path))
    else:
        icon = ''

    if file_path == '':
        return root_name
    else:
        return literal(' / '.join(url_segments) + icon)
336 337
337 338
def files_url_data(request):
    """
    Serialize the request match dict to JSON for the files view, filling
    in defaults for missing ``f_path``/``commit_id``.

    NOTE: inserts the defaults into ``request.matchdict`` in place,
    matching the original behavior.
    """
    matchdict = request.matchdict
    matchdict.setdefault('f_path', '')
    matchdict.setdefault('commit_id', 'tip')
    return json.dumps(matchdict)
348 349
349 350
def repo_files_by_ref_url(db_repo_name, db_repo_type, f_path, ref_name, commit_id, query=None, ):
    """
    Build the files-view URL for a repository, handling the SVN special
    case where a named ref is a path prefix instead of a commit id.
    """
    final_f_path = f_path

    if is_svn(db_repo_type):
        # For SVN a ref_name cannot be used as commit_id: the URL carries
        # the real commit_id while the ref_name becomes a path prefix,
        # e.g. /1000/tags/1.0.0/?at=tags/1.0.0 -- but only for the initial
        # landing URL (root path); deeper paths already carry the full path.
        if ref_name and ref_name != 'tip':
            if f_path in ['', '/']:
                final_f_path = '/'.join([ref_name, f_path])

        # SVN always needs a commit_id explicitly, without a named REF
        default_commit_id = commit_id
    else:
        # git/hg address files by ref name, e.g. /master/some_path?at=master;
        # branch names containing slashes are not supported, so fall back
        # to the plain commit id in that case
        default_commit_id = commit_id if '/' in ref_name else ref_name

    # f_path may be passed as None to mean "no prefix"; normalize to ''
    final_f_path = final_f_path or ''

    files_url = route_path(
        'repo_files',
        repo_name=db_repo_name,
        commit_id=default_commit_id,
        f_path=final_f_path,
        _query=query
    )
    return files_url
396 397
397 398
def code_highlight(code, lexer, formatter, use_hl_filter=False):
    """
    Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``.

    :param use_hl_filter: when True, attach the ElasticSearch highlight
        filter to the lexer before formatting
    """
    if use_hl_filter:
        # add HL filter
        from rhodecode.lib.index import search_utils
        lexer.add_filter(search_utils.ElasticSearchHLFilter())
    tokens = pygments.lex(code, lexer)
    return pygments.format(tokens, formatter)
411 412
412 413
class CodeHtmlFormatter(HtmlFormatter):
    """
    My code Html Formatter for source codes
    """

    def wrap(self, source, outfile):
        # div > pre > per-line code wrapping of the highlighted source
        return self._wrap_div(self._wrap_pre(self._wrap_code(source)))

    def _wrap_code(self, source):
        # give every source line its own <div> with an L<n> anchor id
        for cnt, it in enumerate(source):
            i, t = it
            t = '<div id="L%s">%s</div>' % (cnt + 1, t)
            yield i, t

    def _wrap_tablelinenos(self, inner):
        # Render the highlighted source into a two-column table: line
        # numbers on the left, the code itself on the right. First buffer
        # all code lines to learn the line count.
        dummyoutfile = StringIO.StringIO()
        lncount = 0
        for t, line in inner:
            if t:
                lncount += 1
            dummyoutfile.write(line)

        fl = self.linenostart
        mw = len(str(lncount + fl - 1))  # width of the widest line number
        sp = self.linenospecial
        st = self.linenostep
        la = self.lineanchors
        aln = self.anchorlinenos
        nocls = self.noclasses
        if sp:
            # every `sp`-th line gets the "special" styling
            lines = []

            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if i % sp == 0:
                        if aln:
                            lines.append('<a href="#%s%d" class="special">%*d</a>' %
                                         (la, i, mw, i))
                        else:
                            lines.append('<span class="special">%*d</span>' % (mw, i))
                    else:
                        if aln:
                            lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                        else:
                            lines.append('%*d' % (mw, i))
                else:
                    # line skipped by linenostep: emit an empty slot
                    lines.append('')
            ls = '\n'.join(lines)
        else:
            lines = []
            for i in range(fl, fl + lncount):
                if i % st == 0:
                    if aln:
                        lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
                    else:
                        lines.append('%*d' % (mw, i))
                else:
                    lines.append('')
            ls = '\n'.join(lines)

        # in case you wonder about the seemingly redundant <div> here: since the
        # content in the other cell also is wrapped in a div, some browsers in
        # some configurations seem to mess up the formatting...
        if nocls:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td><div class="linenodiv" '
                      'style="background-color: #f0f0f0; padding-right: 10px">'
                      '<pre style="line-height: 125%">' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        else:
            yield 0, ('<table class="%stable">' % self.cssclass +
                      '<tr><td class="linenos"><div class="linenodiv"><pre>' +
                      ls + '</pre></div></td><td id="hlcode" class="code">')
        yield 0, dummyoutfile.getvalue()
        yield 0, '</td></tr></table>'
488 489
489 490
class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
    """
    Formatter that renders only selected line numbers (search hits) with
    an ellipsis row for skipped gaps, linking each shown line back to the
    source file when a ``url`` is supplied.
    """

    def __init__(self, **kw):
        # only show these line numbers if set
        self.only_lines = kw.pop('only_line_numbers', [])
        self.query_terms = kw.pop('query_terms', [])
        self.max_lines = kw.pop('max_lines', 5)
        self.line_context = kw.pop('line_context', 3)
        self.url = kw.pop('url', None)

        # NOTE: this used `super(CodeHtmlFormatter, self)`, which skips this
        # class in the MRO; it only worked because CodeHtmlFormatter defines
        # no __init__. Name the correct class to keep the MRO intact.
        super(SearchContentCodeHtmlFormatter, self).__init__(**kw)

    def _wrap_code(self, source):
        # wrap each source line in a plain <pre> (no numbered divs here)
        for cnt, it in enumerate(source):
            i, t = it
            t = '<pre>%s</pre>' % t
            yield i, t

    def _wrap_tablelinenos(self, inner):
        # emit only the lines listed in self.only_lines, inserting an
        # ellipsis row whenever a gap in the line numbers is skipped
        yield 0, '<table class="code-highlight %stable">' % self.cssclass

        last_shown_line_number = 0
        current_line_number = 1

        for t, line in inner:
            if not t:
                # non-source token stream entries pass through untouched
                yield t, line
                continue

            if current_line_number in self.only_lines:
                if last_shown_line_number + 1 != current_line_number:
                    yield 0, '<tr>'
                    yield 0, '<td class="line">...</td>'
                    yield 0, '<td id="hlcode" class="code"></td>'
                    yield 0, '</tr>'

                yield 0, '<tr>'
                if self.url:
                    yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
                        self.url, current_line_number, current_line_number)
                else:
                    yield 0, '<td class="line"><a href="">%i</a></td>' % (
                        current_line_number)
                yield 0, '<td id="hlcode" class="code">' + line + '</td>'
                yield 0, '</tr>'

                last_shown_line_number = current_line_number

            current_line_number += 1

        yield 0, '</table>'
540 541
541 542
def hsv_to_rgb(h, s, v):
    """
    Convert an HSV color triple (components in [0, 1]) to an RGB tuple.

    :returns: (r, g, b) tuple with components in [0, 1]
    """
    if s == 0.0:
        # achromatic: every channel equals the value
        return v, v, v

    sector = int(h * 6.0)  # XXX assume int() truncates!
    frac = (h * 6.0) - sector
    p = v * (1.0 - s)
    q = v * (1.0 - s * frac)
    t = v * (1.0 - s * (1.0 - frac))

    # pick the channel ordering for the hue sector
    by_sector = {
        0: (v, t, p),
        1: (q, v, p),
        2: (p, v, t),
        3: (p, q, v),
        4: (t, p, v),
        5: (v, p, q),
    }
    return by_sector[sector % 6]
565 566
566 567
def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
    """
    Generator for getting n of evenly distributed colors using
    hsv color and golden ratio. It always return same order of colors

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: yields the 3 RGB channels as strings (via ``map``)
    """

    golden_ratio = 0.618033988749895
    # fixed starting hue makes the sequence fully reproducible
    h = 0.22717784590367374

    # NOTE: was `xrange` (Python 2 only); `range` behaves identically here
    # and keeps the helper portable
    for _ in range(n):
        h += golden_ratio
        h %= 1
        HSV_tuple = [h, saturation, lightness]
        RGB_tuple = hsv_to_rgb(*HSV_tuple)
        yield map(lambda x: str(int(x * 256)), RGB_tuple)
587 588
588 589
def color_hasher(n=10000, saturation=0.10, lightness=0.95):
    """
    Returns a function which when called with an argument returns a unique
    color for that argument, eg.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: css RGB string

    >>> color_hash = color_hasher()
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('other')
    'rgb(90, 224, 159)'
    """

    color_dict = {}
    cgenerator = unique_color_generator(
        saturation=saturation, lightness=lightness)

    def get_color_string(thing):
        # memoize: the same input always maps to the same color
        if thing in color_dict:
            col = color_dict[thing]
        else:
            # NOTE: was `cgenerator.next()` (Python 2 only); the `next()`
            # builtin is equivalent and also works on Python 3
            col = color_dict[thing] = next(cgenerator)
        return "rgb(%s)" % (', '.join(col))

    return get_color_string
620 621
621 622
def get_lexer_safe(mimetype=None, filepath=None):
    """
    Tries to return a relevant pygments lexer using mimetype/filepath name,
    defaulting to plain text if none could be found

    :param mimetype: optional mimetype to resolve a lexer for
    :param filepath: optional file name to resolve a lexer for
    """
    lexer = None

    # NOTE: previously both lookups shared one try block, so a failed
    # mimetype lookup skipped the filename fallback entirely, and a None
    # filepath could raise an uncaught error. Guard each lookup separately.
    if mimetype:
        try:
            lexer = get_lexer_for_mimetype(mimetype)
        except pygments.util.ClassNotFound:
            # fall through to the filename-based lookup
            pass

    if not lexer and filepath:
        try:
            lexer = get_lexer_for_filename(filepath)
        except pygments.util.ClassNotFound:
            pass

    if not lexer:
        lexer = get_lexer_by_name('text')

    return lexer
640 641
641 642
def get_lexer_for_filenode(filenode):
    """
    Return the lexer for a filenode, preferring any custom lexer
    registered for its file extension.
    """
    return get_custom_lexer(filenode.extension) or filenode.lexer
645 646
646 647
def pygmentize(filenode, **kwargs):
    """
    Render ``filenode`` content as highlighted HTML using pygments.

    :param filenode: file node whose content gets highlighted
    :param kwargs: forwarded to :class:`CodeHtmlFormatter`
    """
    formatter = CodeHtmlFormatter(**kwargs)
    lexer = get_lexer_for_filenode(filenode)
    return literal(code_highlight(filenode.content, lexer, formatter))
656 657
657 658
def is_following_repo(repo_name, user_id):
    """
    Check via ScmModel whether ``user_id`` follows ``repo_name``.
    """
    from rhodecode.model.scm import ScmModel
    model = ScmModel()
    return model.is_following_repo(repo_name, user_id)
661 662
662 663
663 664 class _Message(object):
664 665 """A message returned by ``Flash.pop_messages()``.
665 666
666 667 Converting the message to a string returns the message text. Instances
667 668 also have the following attributes:
668 669
669 670 * ``message``: the message text.
670 671 * ``category``: the category specified when the message was created.
671 672 """
672 673
673 674 def __init__(self, category, message, sub_data=None):
674 675 self.category = category
675 676 self.message = message
676 677 self.sub_data = sub_data or {}
677 678
678 679 def __str__(self):
679 680 return self.message
680 681
681 682 __unicode__ = __str__
682 683
683 684 def __html__(self):
684 685 return escape(safe_unicode(self.message))
685 686
686 687
class Flash(object):
    # List of allowed categories. If None, allow any category.
    categories = ["warning", "notice", "error", "success"]

    # Default category if none is specified.
    default_category = "notice"

    def __init__(self, session_key="flash", categories=None,
                 default_category=None):
        """
        Instantiate a ``Flash`` object.

        ``session_key`` is the key to save the messages under in the user's
        session.

        ``categories`` is an optional list which overrides the default list
        of categories.

        ``default_category`` overrides the default category used for messages
        when none is specified.
        """
        self.session_key = session_key
        if categories is not None:
            self.categories = categories
        if default_category is not None:
            self.default_category = default_category
        if self.categories and self.default_category not in self.categories:
            raise ValueError(
                "unrecognized default category %r" % (self.default_category,))

    def pop_messages(self, session=None, request=None):
        """
        Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        messages = []

        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                sub_data = {}
                if hasattr(msg, 'rsplit'):
                    # string messages may carry a JSON payload appended
                    # after a |DELIM| marker
                    flash_data = msg.rsplit('|DELIM|', 1)
                    org_message = flash_data[0]
                    if len(flash_data) > 1:
                        sub_data = json.loads(flash_data[1])
                else:
                    org_message = msg

                messages.append(_Message(cat, org_message, sub_data=sub_data))

        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        session.save()
        return messages

    def json_alerts(self, session=None, request=None):
        """
        Pop all pending messages and serialize them to the JSON payload
        format consumed by the frontend alert widget.
        """
        payloads = []
        # NOTE: this previously called the module-level `flash` singleton,
        # which broke any non-default Flash instance; use self instead.
        messages = self.pop_messages(session=session, request=request) or []
        for message in messages:
            payloads.append({
                'message': {
                    'message': u'{}'.format(message.message),
                    'level': message.category,
                    'force': True,
                    'subdata': message.sub_data
                }
            })
        return json.dumps(payloads)

    def __call__(self, message, category=None, ignore_duplicate=True,
                 session=None, request=None):
        """
        Queue ``message`` under ``category`` in the session flash storage.
        """
        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        session.flash(
            message, queue=category, allow_duplicate=not ignore_duplicate)


flash = Flash()
784 785
785 786 #==============================================================================
786 787 # SCM FILTERS available via h.
787 788 #==============================================================================
788 789 from rhodecode.lib.vcs.utils import author_name, author_email
789 790 from rhodecode.lib.utils2 import age, age_from_seconds
790 791 from rhodecode.model.db import User, ChangesetStatus
791 792
792 793
# template-facing alias: extract an email address from a commit author string
email = author_email
794 795
795 796
def capitalize(raw_text):
    """Return ``raw_text`` with its first character upper-cased and the
    remainder lower-cased (``str.capitalize`` semantics)."""
    capitalized = raw_text.capitalize()
    return capitalized
798 799
799 800
def short_id(long_id):
    """Shorten a full commit/changeset id to its first 12 characters."""
    return long_id[:12]
802 803
803 804
def hide_credentials(url):
    """Mask any user:password credentials embedded in ``url``."""
    from rhodecode.lib.utils2 import credentials_filter
    filtered = credentials_filter(url)
    return filtered
807 808
808 809
import pytz
import tzlocal
# resolve and cache the server's local timezone once at import time
local_timezone = tzlocal.get_localzone()
812 813
813 814
def get_timezone(datetime_iso, time_is_local=False):
    """
    Compute the '+HH:MM' timezone suffix for a datetime.

    Naive datetimes flagged as local are localized using the RC_TIMEZONE
    environment override or the server's local timezone; anything else is
    treated as UTC ('+00:00').
    """
    # detect if we have a timezone info, otherwise, add it
    is_naive_local = (time_is_local and isinstance(datetime_iso, datetime)
                      and not datetime_iso.tzinfo)
    if not is_naive_local:
        return '+00:00'

    force_timezone = os.environ.get('RC_TIMEZONE', '')
    if force_timezone:
        force_timezone = pytz.timezone(force_timezone)
    timezone = force_timezone or local_timezone
    # strftime('%z') gives e.g. '+0100'; reformat as '+01:00'
    offset = timezone.localize(datetime_iso).strftime('%z')
    return '{}:{}'.format(offset[:-2], offset[-2:])
826 827
827 828
def age_component(datetime_iso, value=None, time_is_local=False, tooltip=True):
    """
    Render a ``<time class="timeago">`` element for the given datetime,
    optionally carrying a tooltip with the absolute time + offset.
    """
    title = value or format_date(datetime_iso)
    tzinfo = get_timezone(datetime_iso, time_is_local=time_is_local)

    tt_title = ''
    if tooltip:
        tt_title = '{title}{tzinfo}'.format(title=title, tzinfo=tzinfo)

    markup = ('<time class="timeago {cls}" title="{tt_title}" '
              'datetime="{dt}{tzinfo}">{title}</time>')
    return literal(markup.format(
        cls='tooltip' if tooltip else '',
        tt_title=tt_title,
        title=title, dt=datetime_iso, tzinfo=tzinfo))
838 839
839 840
840 841 def _shorten_commit_id(commit_id, commit_len=None):
841 842 if commit_len is None:
842 843 request = get_current_request()
843 844 commit_len = request.call_context.visual.show_sha_length
844 845 return commit_id[:commit_len]
845 846
846 847
def show_id(commit, show_idx=None, commit_len=None):
    """
    Configurable function that shows ID
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    :param show_idx: prepend the numeric revision index; defaults to the
        per-request `show_revision_number` visual setting
    :param commit_len: sha length override forwarded to _shorten_commit_id
    """
    if show_idx is None:
        show_idx = get_current_request().call_context.visual.show_revision_number

    raw_id = _shorten_commit_id(commit.raw_id, commit_len=commit_len)
    if show_idx:
        return 'r%s:%s' % (commit.idx, raw_id)
    return '%s' % (raw_id, )
863 864
864 865
def format_date(date):
    """
    use a standardized formatting for dates used in RhodeCode

    :param date: date/datetime object
    :return: formatted date
    """
    if not date:
        return u""
    return safe_unicode(date.strftime("%a, %d %b %Y %H:%M:%S"))
878 879
879 880
class _RepoChecker(object):
    """
    Callable predicate telling whether a repository (vcs instance, db row,
    or plain alias string) belongs to the backend given at construction.
    """

    def __init__(self, backend_alias):
        self._backend_alias = backend_alias

    def __call__(self, repository):
        # accept vcs repo objects (.alias), db repos (.repo_type),
        # or a bare alias string
        if hasattr(repository, 'alias'):
            repo_type = repository.alias
        elif hasattr(repository, 'repo_type'):
            repo_type = repository.repo_type
        else:
            repo_type = repository
        return repo_type == self._backend_alias


is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')
898 899
899 900
def get_repo_type_by_name(repo_name):
    """
    Look up the backend type ('git'/'hg'/'svn') of a repository by name;
    returns None when no such repository exists.
    """
    repo = Repository.get_by_repo_name(repo_name)
    return repo.repo_type if repo else None
904 905
905 906
def is_svn_without_proxy(repository):
    """
    True when ``repository`` is an SVN repo and the SVN http proxy is
    disabled in the vcs settings.
    """
    if not is_svn(repository):
        return False
    from rhodecode.model.settings import VcsSettingsModel
    conf = VcsSettingsModel().get_ui_settings_as_config_obj()
    return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
912 913
913 914
def discover_user(author):
    """
    Tries to discover RhodeCode User based on the author string. Author string
    is typically `FirstName LastName <email@address.com>`
    """
    # if author is already an instance use it for extraction
    if isinstance(author, User):
        return author

    # Valid email in the attribute passed, see if they're in the system
    _email = author_email(author)
    if _email != '':
        user = User.get_by_email(_email, case_insensitive=True, cache=True)
        if user is not None:
            return user

    # Maybe it's a username, we try to extract it and fetch by username ?
    # (returns None when nothing matched)
    user = User.get_by_username(
        author_name(author), case_insensitive=True, cache=True)
    return user
938 939
939 940
def email_or_none(author):
    """
    Extract an email from the commit author string, falling back to the
    matching RhodeCode user's email; None when neither is available.
    """
    # extract email from the commit string
    _email = author_email(author)
    if _email != '':
        return _email

    # see if the author contains a username we can resolve to an email
    user = User.get_by_username(
        author_name(author), case_insensitive=True, cache=True)
    if user is not None:
        return user.email

    # No valid email, not a valid user in the system, none!
    return None
957 958
958 959
def link_to_user(author, length=0, **kwargs):
    """
    Render a link to the user's profile page for ``author`` (commit author
    string or User); falls back to escaped plain text for unknown users
    and the default/anonymous user.

    :param length: optionally shorten the displayed name to this length
    """
    user = discover_user(author)
    # user can be None, but if we have it already it means we can re-use it
    # in the person() function, so we save 1 intensive-query
    if user:
        author = user

    display_person = person(author, 'username_or_name_or_email')
    if length:
        display_person = shorter(display_person, length)

    if not user or user.username == user.DEFAULT_USER:
        return escape(display_person)
    return link_to(
        escape(display_person),
        route_path('user_profile', username=user.username),
        **kwargs)
977 978
978 979
def link_to_group(users_group_name, **kwargs):
    """Render a link to the given user group's profile page."""
    profile_url = route_path(
        'user_group_profile', user_group_name=users_group_name)
    return link_to(escape(users_group_name), profile_url, **kwargs)
984 985
985 986
def person(author, show_attr="username_and_name"):
    """
    Resolve ``author`` to a known user and return its ``show_attr``
    attribute; otherwise fall back to the raw author name or email.
    """
    user = discover_user(author)
    if user:
        return getattr(user, show_attr)
    fallback_name = author_name(author)
    fallback_email = email(author)
    return fallback_name or fallback_email
994 995
995 996
def author_string(email):
    """
    Resolve ``email`` to a display string: 'First Last &lt;email&gt;' when
    a matching user with a name exists, the bare email otherwise, or None
    for falsy input.
    """
    if not email:
        return None
    user = User.get_by_email(email, case_insensitive=True, cache=True)
    if user and (user.first_name or user.last_name):
        return '%s %s &lt;%s&gt;' % (user.first_name, user.last_name, email)
    return email
1009 1010
1010 1011
def person_by_id(id_, show_attr="username_and_name"):
    """
    Resolve a user id to its ``show_attr`` value. Non-numeric input, or a
    numeric id with no matching user, is returned as-is (numeric strings
    are returned int-converted, preserving historical behavior).
    """
    # maybe it's an ID ?
    if str(id_).isdigit() or isinstance(id_, int):
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            # attr to return from fetched user
            return getattr(user, show_attr)
    return id_
1022 1023
1023 1024
def gravatar_with_user(request, author, show_disabled=False, tooltip=False):
    """Render the `gravatar_with_user` partial from the base template."""
    render_partial = request.get_partial_renderer(
        'rhodecode:templates/base/base.mako')
    return render_partial(
        'gravatar_with_user', author, show_disabled=show_disabled,
        tooltip=tooltip)
1027 1028
1028 1029
# Ordered mapping of supported repo meta-tags: name -> (compiled pattern,
# replacement HTML). Order matters: specific patterns must run before the
# generic catch-alls at the bottom. Patterns match against HTML-escaped
# text, hence the `\=\&gt;` (i.e. '=>') sequences.
tags_paterns = OrderedDict((
    ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
              '<div class="metatag" tag="lang">\\2</div>')),

    ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
             '<div class="metatag" tag="see">see: \\1 </div>')),

    ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
             '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),

    ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
                 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),

    ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
             '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),

    ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
               '<div class="metatag" tag="state \\1">\\1</div>')),

    # label in grey
    ('label', (re.compile(r'\[([a-z]+)\]'),
               '<div class="metatag" tag="label">\\1</div>')),

    # generic catch all in grey
    ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
                 '<div class="metatag" tag="generic">\\1</div>')),
))
1056 1057
1057 1058
def extract_metatags(value):
    """
    Extract supported meta-tags from given text value

    :returns: tuple of (list of (tag_name, matched_text), remaining text)
    """
    if not value:
        return [], ''

    tags = []
    for key, (pat, _replace_html) in tags_paterns.items():
        tags.extend((key, match.group()) for match in pat.finditer(value))
        # strip the matched tags from the remaining text
        value = pat.sub('', value)

    return tags, value
1072 1073
1073 1074
def style_metatag(tag_type, value):
    """
    converts tags from value into html equivalent
    """
    if not value:
        return ''

    tag_data = tags_paterns.get(tag_type)
    if not tag_data:
        # unknown tag type: return the value untouched
        return value

    pat, replace_html = tag_data
    # convert to plain `unicode` instead of a markup tag to be used in
    # regex expressions. safe_unicode doesn't work here
    return pat.sub(replace_html, unicode(value))
1090 1091
1091 1092
def bool2icon(value, show_at_false=True):
    """
    Returns boolean value of a given value, represented as html element with
    classes that will represent icons

    :param value: given value to convert to html node
    :param show_at_false: when False, falsy values render an empty <i> tag
    """
    if value:  # does bool conversion
        return HTML.tag('i', class_="icon-true", title='True')
    # not true as bool
    if show_at_false:
        return HTML.tag('i', class_="icon-false", title='False')
    return HTML.tag('i')
1106 1107
1107 1108
def b64(inp):
    """Base64-encode ``inp`` (bytes in, bytes out)."""
    encoded = base64.b64encode(inp)
    return encoded
1110 1111
1111 1112 #==============================================================================
1112 1113 # PERMS
1113 1114 #==============================================================================
1114 1115 from rhodecode.lib.auth import (
1115 1116 HasPermissionAny, HasPermissionAll,
1116 1117 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll,
1117 1118 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token,
1118 1119 csrf_token_key, AuthUser)
1119 1120
1120 1121
1121 1122 #==============================================================================
1122 1123 # GRAVATAR URL
1123 1124 #==============================================================================
class InitialsGravatar(object):
    """
    Generates an SVG avatar (gravatar substitute) that renders the user's
    two-letter initials on a background color derived deterministically
    from the email address (see str2color).  NOTE: this code is Python 2
    only (uses ``str.encode('hex')`` and byte-string SVG output).
    """
    def __init__(self, email_address, first_name, last_name, size=30,
                 background=None, text_color='#fff'):
        # size is used for both width and height of the square SVG
        self.size = size
        self.first_name = first_name
        self.last_name = last_name
        self.email_address = email_address
        # explicit background wins; otherwise derive a stable color from email
        self.background = background or self.str2color(email_address)
        self.text_color = text_color

    def get_color_bank(self):
        """
        returns a predefined list of colors that gravatars can use.
        Those are randomized distinct colors that guarantee readability and
        uniqueness.

        generated with: http://phrogz.net/css/distinct-colors.html
        """
        return [
            '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
            '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
            '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
            '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
            '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
            '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
            '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
            '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
            '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
            '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
            '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
            '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
            '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
            '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
            '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
            '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
            '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
            '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
            '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
            '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
            '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
            '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
            '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
            '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
            '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
            '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
            '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
            '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
            '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
            '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
            '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
            '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
            '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
            '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
            '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
            '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
            '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
            '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
            '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
            '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
            '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
            '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
            '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
            '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
            '#4f8c46', '#368dd9', '#5c0073'
        ]

    def rgb_to_hex_color(self, rgb_tuple):
        """
        Converts an rgb_tuple passed to an hex color.

        :param rgb_tuple: tuple with 3 ints represents rgb color space
        """
        # NOTE: str.encode('hex') is the Python 2 hex codec; each byte
        # (0-255) becomes two hex digits.
        return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))

    def email_to_int_list(self, email_str):
        """
        Get every byte of the hex digest value of email and turn it to integer.
        It's going to be always between 0-255
        """
        digest = md5_safe(email_str.lower())
        # md5 hex digest is 32 chars -> 16 two-char byte values
        return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]

    def pick_color_bank_index(self, email_str, color_bank):
        # first digest byte, wrapped to the bank size, gives a stable index
        return self.email_to_int_list(email_str)[0] % len(color_bank)

    def str2color(self, email_str):
        """
        Tries to map in a stable algorithm an email to color

        :param email_str:
        """
        color_bank = self.get_color_bank()
        # pick position (module it's length so we always find it in the
        # bank even if it's smaller than 256 values
        pos = self.pick_color_bank_index(email_str, color_bank)
        return color_bank[pos]

    def normalize_email(self, email_address):
        # Ensures a non-empty, ascii-only, `user@host`-shaped address so
        # get_initials() can always split on '@'.
        import unicodedata
        # default host used to fill in the fake/missing email
        default_host = u'localhost'

        if not email_address:
            email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)

        email_address = safe_unicode(email_address)

        if u'@' not in email_address:
            email_address = u'%s@%s' % (email_address, default_host)

        if email_address.endswith(u'@'):
            email_address = u'%s%s' % (email_address, default_host)

        # strip accents/diacritics, drop anything non-ascii
        email_address = unicodedata.normalize('NFKD', email_address)\
            .encode('ascii', 'ignore')
        return email_address

    def get_initials(self):
        """
        Returns 2 letter initials calculated based on the input.
        The algorithm picks first given email address, and takes first letter
        of part before @, and then the first letter of server name. In case
        the part before @ is in a format of `somestring.somestring2` it replaces
        the server letter with first letter of somestring2

        In case function was initialized with both first and lastname, this
        overrides the extraction from email by first letter of the first and
        last name. We add special logic to that functionality, In case Full name
        is compound, like Guido Von Rossum, we use last part of the last name
        (Von Rossum) picking `R`.

        Function also normalizes the non-ascii characters to they ascii
        representation, eg Ą => A
        """
        import unicodedata
        # replace non-ascii to ascii
        first_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
        last_name = unicodedata.normalize(
            'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')

        # do NFKD encoding, and also make sure email has proper format
        email_address = self.normalize_email(self.email_address)

        # first push the email initials
        prefix, server = email_address.split('@', 1)

        # check if prefix is maybe a 'first_name.last_name' syntax
        _dot_split = prefix.rsplit('.', 1)
        if len(_dot_split) == 2 and _dot_split[1]:
            initials = [_dot_split[0][0], _dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # then try to replace either first_name or last_name
        fn_letter = (first_name or " ")[0].strip()
        ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()

        if fn_letter:
            initials[0] = fn_letter

        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()

    def get_img_data_by_type(self, font_family, img_type):
        # Returns a canned SVG body for special avatar types; currently only
        # the anonymous/default-user silhouette is defined.
        default_user = """
        <svg xmlns="http://www.w3.org/2000/svg"
        version="1.1" x="0px" y="0px" width="{size}" height="{size}"
        viewBox="-15 -10 439.165 429.164"

        xml:space="preserve"
        style="background:{background};" >

        <path d="M204.583,216.671c50.664,0,91.74-48.075,
        91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
        c-50.668,0-91.74,25.14-91.74,107.377C112.844,
        168.596,153.916,216.671,
        204.583,216.671z" fill="{text_color}"/>
        <path d="M407.164,374.717L360.88,
        270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
        c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
        15.366-44.203,23.488-69.076,23.488c-24.877,
        0-48.762-8.122-69.078-23.488
        c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
        259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
        c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
        6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
        19.402-10.527 C409.699,390.129,
        410.355,381.902,407.164,374.717z" fill="{text_color}"/>
        </svg>""".format(
            size=self.size,
            background='#979797',  # @grey4
            text_color=self.text_color,
            font_family=font_family)

        # raises KeyError for unknown img_type values
        return {
            "default_user": default_user
        }[img_type]

    def get_img_data(self, svg_type=None):
        """
        generates the svg metadata for image
        """
        fonts = [
            '-apple-system',
            'BlinkMacSystemFont',
            'Segoe UI',
            'Roboto',
            'Oxygen-Sans',
            'Ubuntu',
            'Cantarell',
            'Helvetica Neue',
            'sans-serif'
        ]
        font_family = ','.join(fonts)
        if svg_type:
            # special avatar requested (e.g. 'default_user') - skip initials
            return self.get_img_data_by_type(font_family, svg_type)

        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
             width="{size}" height="{size}"
             style="width: 100%; height: 100%; background-color: {background}"
             viewBox="0 0 {size} {size}">
            <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
                pointer-events="auto" fill="{text_color}"
                font-family="{font_family}"
                style="font-weight: 400; font-size: {f_size}px;">{text}
            </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/2.05,  # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data

    def generate_svg(self, svg_type=None):
        # Returns a base64 data-URI suitable for an <img src="..."> attribute.
        # b64encode on a str works because this is Python 2 byte-string data.
        img_data = self.get_img_data(svg_type)
        return "data:image/svg+xml;base64,%s" % base64.b64encode(img_data)
1368 1369
1369 1370
def initials_gravatar(request, email_address, first_name, last_name, size=30, store_on_disk=False):
    """
    Returns an initials-avatar for the given user data: either an inline
    base64 SVG data-URI, or (when ``store_on_disk`` is True) a route to a
    file-store artifact that is created on first use and reused afterwards.

    :param request: pyramid request, used for settings and route generation
    :param email_address: drives both the color and (fallback) initials
    :param first_name:/:param last_name: preferred initials source
    :param size: square pixel size of the SVG
    :param store_on_disk: persist the SVG in the artifact store and return
        a download URL instead of a data-URI
    """

    svg_type = None
    if email_address == User.DEFAULT_USER_EMAIL:
        # anonymous/default user gets the silhouette instead of initials
        svg_type = 'default_user'

    klass = InitialsGravatar(email_address, first_name, last_name, size)

    if store_on_disk:
        # local imports keep the file-store dependency out of the common path
        from rhodecode.apps.file_store import utils as store_utils
        from rhodecode.apps.file_store.exceptions import FileNotAllowedException, \
            FileOverSizeException
        from rhodecode.model.db import Session

        # stable cache key per (email, first, last) triple
        image_key = md5_safe(email_address.lower()
                             + first_name.lower() + last_name.lower())

        storage = store_utils.get_file_storage(request.registry.settings)
        filename = '{}.svg'.format(image_key)
        subdir = 'gravatars'
        # since final name has a counter, we apply the 0
        uid = storage.apply_counter(0, store_utils.uid_filename(filename, randomized=False))
        store_uid = os.path.join(subdir, uid)

        # already generated earlier? reuse the stored artifact
        db_entry = FileStore.get_by_store_uid(store_uid)
        if db_entry:
            return request.route_path('download_file', fid=store_uid)

        img_data = klass.get_img_data(svg_type=svg_type)
        img_file = store_utils.bytes_to_file_obj(img_data)

        try:
            store_uid, metadata = storage.save_file(
                img_file, filename, directory=subdir,
                extensions=['.svg'], randomized_name=False)
        except (FileNotAllowedException, FileOverSizeException):
            # NOTE(review): re-raised unchanged; the except clause only
            # documents the expected storage failures
            raise

        try:
            entry = FileStore.create(
                file_uid=store_uid, filename=metadata["filename"],
                file_hash=metadata["sha256"], file_size=metadata["size"],
                file_display_name=filename,
                file_description=u'user gravatar `{}`'.format(safe_unicode(filename)),
                hidden=True, check_acl=False, user_id=1
            )
            Session().add(entry)
            Session().commit()
            log.debug('Stored upload in DB as %s', entry)
        except Exception:
            raise

        return request.route_path('download_file', fid=store_uid)

    else:
        # in-memory variant: data-URI encoded SVG
        return klass.generate_svg(svg_type=svg_type)
1426 1427
1427 1428
def gravatar_external(request, gravatar_url_tmpl, email_address, size=30):
    """
    Expands an external gravatar URL template, substituting the supported
    placeholders: {email}, {md5email}, {netloc}, {scheme} and {size}.
    """
    substitutions = [
        ('{email}', email_address),
        ('{md5email}', md5_safe(email_address.lower())),
        ('{netloc}', request.host),
        ('{scheme}', request.scheme),
        ('{size}', safe_str(size)),
    ]
    url = safe_str(gravatar_url_tmpl)
    for placeholder, value in substitutions:
        url = url.replace(placeholder, value)
    return url
1435 1436
1436 1437
def gravatar_url(email_address, size=30, request=None):
    """
    Returns an avatar URL for the given email: an external gravatar link
    when enabled in the visual settings, otherwise a generated initials SVG.
    """
    request = request or get_current_request()
    use_external = request.call_context.visual.use_gravatar

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, unicode):
        # hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # empty email or default user
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(request, User.DEFAULT_USER_EMAIL, '', '', size=size)

    if not use_external:
        return initials_gravatar(request, email_address, '', '', size=size)

    tmpl = request.call_context.visual.gravatar_url or User.DEFAULT_GRAVATAR_URL
    return gravatar_external(request, tmpl, email_address, size=size)
1457 1458
1458 1459
def breadcrumb_repo_link(repo):
    """
    Makes a breadcrumbs path link to repo

    ex::
        group >> subgroup >> repo

    :param repo: a Repository instance
    """
    crumbs = []
    # parent group links, outermost first
    for group in repo.groups_with_parents:
        crumbs.append(link_to(
            group.name,
            route_path('repo_group_home', repo_group_name=group.group_name),
            title='last change:{}'.format(format_date(group.last_commit_change))))
    # final crumb: the repository itself
    crumbs.append(link_to(
        repo.just_name,
        route_path('repo_summary', repo_name=repo.repo_name),
        title='last change:{}'.format(format_date(repo.last_commit_change))))

    return literal(' &raquo; '.join(crumbs))
1479 1480
1480 1481
def breadcrumb_repo_group_link(repo_group):
    """
    Makes a breadcrumbs path link to repo

    ex::
        group >> subgroup

    :param repo_group: a Repository Group instance
    """
    crumbs = []
    # ancestor group links, outermost first
    for parent in repo_group.parents:
        crumbs.append(link_to(
            parent.name,
            route_path('repo_group_home', repo_group_name=parent.group_name),
            title='last change:{}'.format(format_date(parent.last_commit_change))))
    # final crumb: the group itself
    crumbs.append(link_to(
        repo_group.name,
        route_path('repo_group_home', repo_group_name=repo_group.group_name),
        title='last change:{}'.format(format_date(repo_group.last_commit_change))))

    return literal(' &raquo; '.join(crumbs))
1503 1504
1504 1505
def format_byte_size_binary(file_size):
    """
    Formats file/folder sizes to standard.
    """
    # treat missing size as zero bytes
    size = 0 if file_size is None else file_size
    return format_byte_size(size, binary=True)
1514 1515
1515 1516
def urlify_text(text_, safe=True, **href_attrs):
    """
    Extract urls from text and make html links out of them
    """

    url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                         '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')

    def _make_link(match_obj):
        # wrap the matched url in an <a> tag carrying any extra attributes
        matched_url = match_obj.groups()[0]
        attrs = dict(href_attrs)
        attrs['href'] = matched_url
        return HTML.tag("a", matched_url, **attrs)

    linked_text = url_pat.sub(_make_link, text_)

    if not safe:
        return linked_text
    return literal(linked_text)
1536 1537
1537 1538
def urlify_commits(text_, repo_name):
    """
    Extract commit ids from text and make link from them

    :param text_:
    :param repo_name: repo name to build the URL with
    """

    url_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')

    def _commit_link(match_obj):
        # groups: (leading ws/anchor, commit hash, trailing ws/anchor)
        pref, commit_id, suf = match_obj.groups()

        tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-alt="%(hovercard_alt)s" data-hovercard-url="%(hovercard_url)s">'
            '%(commit_id)s</a>%(suf)s'
        )
        return tmpl % {
            'pref': pref,
            'cls': 'revision-link',
            'url': route_url(
                'repo_commit', repo_name=repo_name, commit_id=commit_id),
            'commit_id': commit_id,
            'suf': suf,
            'hovercard_alt': 'Commit: {}'.format(commit_id),
            'hovercard_url': route_url(
                'hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id)
        }

    return url_pat.sub(_commit_link, text_)
1572 1573
1573 1574
def _process_url_func(match_obj, repo_name, uid, entry,
                      return_raw_data=False, link_format='html'):
    """
    Turns a single issue-pattern regex match into a rendered link (or raw
    id/url data when ``return_raw_data`` is True).

    :param match_obj: regex match produced by the issue tracker pattern
    :param repo_name: full repo name; also split into name/group for vars
    :param uid: issue tracker entry uid (NOTE(review): not used in the body;
        presumably kept for the partial() call signature - confirm)
    :param entry: issue tracker settings dict with 'url', 'desc', 'pref' keys
    :param return_raw_data: return {'id', 'url'} dict instead of markup
    :param link_format: one of html/html+hovercard/rst(+hovercard)/
        markdown(+hovercard)
    """
    # preserve a single leading space the pattern may have consumed
    pref = ''
    if match_obj.group().startswith(' '):
        pref = ' '

    issue_id = ''.join(match_obj.groups())

    # choose the output template for the requested markup flavour
    if link_format == 'html':
        tmpl = (
            '%(pref)s<a class="tooltip %(cls)s" href="%(url)s" title="%(title)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format == 'html+hovercard':
        tmpl = (
            '%(pref)s<a class="tooltip-hovercard %(cls)s" href="%(url)s" data-hovercard-url="%(hovercard_url)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format in ['rst', 'rst+hovercard']:
        tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
    elif link_format in ['markdown', 'markdown+hovercard']:
        tmpl = '[%(pref)s%(issue-prefix)s%(id-repr)s](%(url)s)'
    else:
        raise ValueError('Bad link_format:{}'.format(link_format))

    (repo_name_cleaned,
     parent_group_name) = RepoGroupModel()._get_group_name_and_parent(repo_name)

    # variables replacement
    named_vars = {
        'id': issue_id,
        'repo': repo_name,
        'repo_name': repo_name_cleaned,
        'group_name': parent_group_name,
        # set dummy keys so we always have them
        'hostname': '',
        'netloc': '',
        'scheme': ''
    }

    request = get_current_request()
    if request:
        # exposes, hostname, netloc, scheme
        host_data = get_host_info(request)
        named_vars.update(host_data)

    # named regex variables
    named_vars.update(match_obj.groupdict())
    # safe_substitute leaves unknown ${...} placeholders untouched
    _url = string.Template(entry['url']).safe_substitute(**named_vars)
    desc = string.Template(escape(entry['desc'])).safe_substitute(**named_vars)
    hovercard_url = string.Template(entry.get('hovercard_url', '')).safe_substitute(**named_vars)

    def quote_cleaner(input_str):
        """Remove quotes as it's HTML"""
        return input_str.replace('"', '')

    data = {
        'pref': pref,
        'cls': quote_cleaner('issue-tracker-link'),
        'url': quote_cleaner(_url),
        'id-repr': issue_id,
        'issue-prefix': entry['pref'],
        'serv': entry['url'],
        # sanitize the description before it lands in a title attribute
        'title': bleach.clean(desc, strip=True),
        'hovercard_url': hovercard_url
    }

    if return_raw_data:
        return {
            'id': issue_id,
            'url': _url
        }
    return tmpl % data
1647 1648
1648 1649
def get_active_pattern_entries(repo_name):
    """
    Loads the active issue-tracker pattern settings for the given repo
    name (or the global settings when no repo name is given).
    """
    repo = None
    if repo_name:
        # Retrieving repo_name to avoid invalid repo_name to explode on
        # IssueTrackerSettingsModel but still passing invalid name further down
        repo = Repository.get_by_repo_name(repo_name, cache=True)

    model = IssueTrackerSettingsModel(repo=repo)
    return model.get_settings(cache=True)
1659 1660
1660 1661
# matches pull-request references like `!123` at start of text or after a space
pr_pattern_re = regex.compile(r'(?:(?:^!)|(?: !))(\d+)')

# link output formats accepted by process_patterns()
allowed_link_formats = [
    'html', 'rst', 'markdown', 'html+hovercard', 'rst+hovercard', 'markdown+hovercard']
1665 1666
1666 1667
def process_patterns(text_string, repo_name, link_format='html', active_entries=None):
    """
    Applies all active issue-tracker patterns (plus the built-in `!123`
    pull-request pattern) to ``text_string`` and replaces matches with links
    in the requested ``link_format``.

    :param text_string: text to scan and rewrite
    :param repo_name: repo the patterns are scoped to ('' for global)
    :param link_format: one of ``allowed_link_formats``
    :param active_entries: pre-fetched pattern settings; fetched when None
    :returns: tuple (new_text, issues_data, errors) where issues_data is a
        list of {'id', 'url'} dicts and errors collects pattern-compile
        failures as ValueError instances
    """

    if link_format not in allowed_link_formats:
        raise ValueError('Link format can be only one of:{} got {}'.format(
            allowed_link_formats, link_format))

    if active_entries is None:
        log.debug('Fetch active issue tracker patterns for repo: %s', repo_name)
        active_entries = get_active_pattern_entries(repo_name)

    issues_data = []
    errors = []
    new_text = text_string

    log.debug('Got %s entries to process', len(active_entries))
    for uid, entry in active_entries.items():
        log.debug('found issue tracker entry with uid %s', uid)

        # both a pattern and a target url are required for a usable entry
        if not (entry['pat'] and entry['url']):
            log.debug('skipping due to missing data')
            continue

        log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s',
                  uid, entry['pat'], entry['url'], entry['pref'])

        # entries may carry a pre-compiled pattern; compile on the fly otherwise
        if entry.get('pat_compiled'):
            pattern = entry['pat_compiled']
        else:
            try:
                pattern = regex.compile(r'%s' % entry['pat'])
            except regex.error as e:
                # bad user-provided pattern: record and keep processing others
                regex_err = ValueError('{}:{}'.format(entry['pat'], e))
                log.exception('issue tracker pattern: `%s` failed to compile', regex_err)
                errors.append(regex_err)
                continue

        data_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            return_raw_data=True)

        # collect raw issue data from the ORIGINAL text for each match
        for match_obj in pattern.finditer(text_string):
            issues_data.append(data_func(match_obj))

        url_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            link_format=link_format)

        # rewrite the (already partially processed) text with links
        new_text = pattern.sub(url_func, new_text)
        log.debug('processed prefix:uid `%s`', uid)

    # finally use global replace, eg !123 -> pr-link, those will not catch
    # if already similar pattern exists
    server_url = '${scheme}://${netloc}'
    pr_entry = {
        'pref': '!',
        'url': server_url + '/_admin/pull-requests/${id}',
        'desc': 'Pull Request !${id}',
        'hovercard_url': server_url + '/_hovercard/pull_request/${id}'
    }
    pr_url_func = partial(
        _process_url_func, repo_name=repo_name, entry=pr_entry, uid=None,
        link_format=link_format+'+hovercard')
    new_text = pr_pattern_re.sub(pr_url_func, new_text)
    log.debug('processed !pr pattern')

    return new_text, issues_data, errors
1733 1734
1734 1735
def urlify_commit_message(commit_text, repository=None, active_pattern_entries=None,
                          issues_container_callback=None, error_container=None):
    """
    Parses given text message and makes proper links.
    issues are linked to given issue-server, and rest is a commit link

    :param commit_text: raw commit message to escape and linkify
    :param repository: repo name; enables commit-id links and scopes patterns
    :param active_pattern_entries: pre-fetched issue patterns (optional)
    :param issues_container_callback: optional callable invoked once for each
        extracted issue data dict
    :param error_container: optional list extended with pattern-compile errors
    """
    # NOTE: this block contained leftover diff residue (both the removed
    # `issues_container` list API and the new callback API); resolved to the
    # new `issues_container_callback` interface.

    def escaper(_text):
        # minimal HTML escaping; result is wrapped in literal() at the end
        return _text.replace('<', '&lt;').replace('>', '&gt;')

    new_text = escaper(commit_text)

    # extract http/https links and make them real urls
    new_text = urlify_text(new_text, safe=False)

    # urlify commits - extract commit ids and make link out of them, if we have
    # the scope of repository present.
    if repository:
        new_text = urlify_commits(new_text, repository)

    # process issue tracker patterns
    new_text, issues, errors = process_patterns(
        new_text, repository or '', active_entries=active_pattern_entries)

    if issues_container_callback is not None:
        for issue in issues:
            issues_container_callback(issue)

    if error_container is not None:
        error_container.extend(errors)

    return literal(new_text)
1766 1768
1767 1769
def render_binary(repo_name, file_obj):
    """
    Choose how to render a binary file
    """

    # unicode
    filename = file_obj.name

    # images
    image_patterns = ('*.png', '*.jpeg', '*.jpg', '*.ico', '*.gif')
    for pattern in image_patterns:
        if not fnmatch.fnmatch(filename, pat=pattern):
            continue
        src = route_path(
            'repo_file_raw', repo_name=repo_name,
            commit_id=file_obj.commit.raw_id,
            f_path=file_obj.path)
        return literal(
            '<img class="rendered-binary" alt="rendered-image" src="{}">'.format(src))
1786 1788
1787 1789
def renderer_from_filename(filename, exclude=None):
    """
    choose a renderer based on filename, this works only for text based files
    """

    # ipython/jupyter notebooks get their dedicated renderer
    if fnmatch.fnmatch(filename, pat='*.ipynb'):
        return 'jupyter'

    markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
    return markup or None
1802 1804
1803 1805
def render(source, renderer='rst', mentions=False, relative_urls=None,
           repo_name=None, active_pattern_entries=None, issues_container_callback=None):
    """
    Renders markup ``source`` to an HTML fragment.

    :param source: raw markup text
    :param renderer: 'plain', 'rst', 'markdown' or 'jupyter'; any other value
        returns None (callers then show the raw file source)
    :param mentions: enable @mention processing in rst/markdown
    :param relative_urls: when set, rewrite relative links against this base
    :param repo_name: enables issue-tracker pattern processing on the source
    :param active_pattern_entries: pre-fetched issue patterns (optional)
    :param issues_container_callback: optional callable invoked once for each
        extracted issue data dict
    """
    # NOTE: this block contained leftover diff residue (both the removed
    # `issues_container` list API and the new callback API); resolved to the
    # new `issues_container_callback` interface.

    def maybe_convert_relative_links(html_source):
        if relative_urls:
            return relative_links(html_source, relative_urls)
        return html_source

    if renderer == 'plain':
        return literal(
            MarkupRenderer.plain(source, leading_newline=False))

    elif renderer == 'rst':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source, issues, errors = process_patterns(
                source, repo_name, link_format='rst',
                active_entries=active_pattern_entries)
            if issues_container_callback is not None:
                for issue in issues:
                    issues_container_callback(issue)

        return literal(
            '<div class="rst-block">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.rst(source, mentions=mentions)))

    elif renderer == 'markdown':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source, issues, errors = process_patterns(
                source, repo_name, link_format='markdown',
                active_entries=active_pattern_entries)
            if issues_container_callback is not None:
                for issue in issues:
                    issues_container_callback(issue)

        return literal(
            '<div class="markdown-block">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.markdown(source, flavored=True,
                                        mentions=mentions)))

    elif renderer == 'jupyter':
        return literal(
            '<div class="ipynb">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.jupyter(source)))

    # None means just show the file-source
    return None
1853 1858
1854 1859
def commit_status(repo, commit_id):
    """Return the recorded changeset status for ``commit_id`` in ``repo``."""
    model = ChangesetStatusModel()
    return model.get_status(repo, commit_id)
1857 1862
1858 1863
def commit_status_lbl(commit_status):
    """Map a commit status code to its display label (None if unknown)."""
    status_map = dict(ChangesetStatus.STATUSES)
    return status_map.get(commit_status)
1861 1866
1862 1867
def commit_time(repo_name, commit_id):
    """Return the date of the given commit in the named repository."""
    repository = Repository.get_by_repo_name(repo_name)
    target_commit = repository.get_commit(commit_id=commit_id)
    return target_commit.date
1867 1872
1868 1873
def get_permission_name(key):
    """Return the human readable name for a permission key (None if unknown)."""
    perms_map = dict(Permission.PERMS)
    return perms_map.get(key)
1871 1876
1872 1877
def journal_filter_help(request):
    """
    Builds the translated help text shown next to the journal search filter,
    including the dynamically generated list of available audit actions.
    """
    _ = request.translate
    # local import avoids pulling audit_logger at module import time
    from rhodecode.lib.audit_logger import ACTIONS
    actions = '\n'.join(textwrap.wrap(', '.join(sorted(ACTIONS.keys())), 80))

    return _(
        'Example filter terms:\n' +
        ' repository:vcs\n' +
        ' username:marcin\n' +
        ' username:(NOT marcin)\n' +
        ' action:*push*\n' +
        ' ip:127.0.0.1\n' +
        ' date:20120101\n' +
        ' date:[20120101100000 TO 20120102]\n' +
        '\n' +
        'Actions: {actions}\n' +
        '\n' +
        'Generate wildcards using \'*\' character:\n' +
        ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
        ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
        '\n' +
        'Optional AND / OR operators in queries\n' +
        ' "repository:vcs OR repository:test"\n' +
        ' "username:test AND repository:test*"\n'
    ).format(actions=actions)
1898 1903
1899 1904
def not_mapped_error(repo_name):
    """
    Flashes an error message telling the user that ``repo_name`` exists on
    the filesystem but is not (yet) mapped in the database.
    """
    from rhodecode.translation import _
    flash(_('%s repository is not mapped to db perhaps'
            ' it was created or renamed from the filesystem'
            ' please run the application again'
            ' in order to rescan repositories') % repo_name, category='error')
1906 1911
1907 1912
def ip_range(ip_addr):
    """Return a human readable `start - end` range for the given address."""
    from rhodecode.model.db import UserIpMap
    start, end = UserIpMap._get_ip_range(ip_addr)
    return '%s - %s' % (start, end)
1912 1917
1913 1918
def form(url, method='post', needs_csrf_token=True, **attrs):
    """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
    is_get = method.lower() == 'get'
    if not is_get and needs_csrf_token:
        # refuse to build state-changing forms without an explicit opt-out
        raise Exception(
            'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
            'CSRF token. If the endpoint does not require such token you can ' +
            'explicitly set the parameter needs_csrf_token to false.')

    return insecure_form(url, method=method, **attrs)
1923 1928
1924 1929
def secure_form(form_url, method="POST", multipart=False, **attrs):
    """Start a form tag that points the action to an url. This
    form tag will also include the hidden field containing
    the auth token.

    The url options should be given either as a string, or as a
    ``url()`` function. The method for the form defaults to POST.

    Options:

    ``multipart``
        If set to True, the enctype is set to "multipart/form-data".
    ``method``
        The method to use when submitting the form, usually either
        "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
        hidden input with name _method is added to simulate the verb
        over POST.

    """

    # the request is required to read the session-bound CSRF token; it is
    # popped so it does not leak into the generated form attributes
    if 'request' in attrs:
        session = attrs['request'].session
        del attrs['request']
    else:
        raise ValueError(
            'Calling this form requires request= to be passed as argument')

    _form = insecure_form(form_url, method, multipart, **attrs)
    # append the hidden CSRF token input right after the opening form tag
    token = literal(
        '<input type="hidden" name="{}" value="{}">'.format(
            csrf_token_key, get_csrf_token(session)))

    return literal("%s\n%s" % (_form, token))
1958 1963
1959 1964
def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
    """Render a <select> plus the select2 initializer script for it."""
    select_html = select(name, selected, options, **attrs)

    select2 = """
    <script>
        $(document).ready(function() {
              $('#%s').select2({
                  containerCssClass: 'drop-menu %s',
                  dropdownCssClass: 'drop-menu-dropdown',
                  dropdownAutoWidth: true%s
              });
        });
    </script>
    """

    filter_option = """,
            minimumResultsForSearch: -1
    """
    input_id = attrs.get('id') or name
    extra_classes = ' '.join(attrs.pop('extra_classes', []))
    # when filtering is disabled, select2's search box is suppressed
    filter_enabled = "" if enable_filter else filter_option
    select_script = literal(select2 % (input_id, extra_classes, filter_enabled))

    return literal(select_html + select_script)
1984 1989
1985 1990
def get_visual_attr(tmpl_context_var, attr_name):
    """
    A safe way to get a variable from visual variable of template context

    :param tmpl_context_var: instance of tmpl_context, usually present as `c`
    :param attr_name: name of the attribute we fetch from the c.visual
    """
    visual = getattr(tmpl_context_var, 'visual', None)
    if visual:
        return getattr(visual, attr_name, None)
    return None
1998 2003
1999 2004
def get_last_path_part(file_node):
    """Return ``../<basename>`` for a file node's path, or ``/`` for the root node."""
    if not file_node.path:
        return u'/'

    basename = safe_unicode(file_node.path.split('/')[-1])
    return u'../' + basename
2006 2011
2007 2012
def route_url(*args, **kwargs):
    """
    Wrapper around pyramids `route_url` (fully qualified url) function.
    """
    return get_current_request().route_url(*args, **kwargs)
2014 2019
2015 2020
def route_path(*args, **kwargs):
    """
    Wrapper around pyramids `route_path` function.
    """
    return get_current_request().route_path(*args, **kwargs)
2022 2027
2023 2028
def route_path_or_none(*args, **kwargs):
    """Like :func:`route_path`, but returns None for unknown route names."""
    try:
        return route_path(*args, **kwargs)
    except KeyError:
        return None
2029 2034
2030 2035
def current_route_path(request, **kw):
    """Current route path, with the query string merged with/overridden by `kw`."""
    query_args = request.GET.mixed()
    query_args.update(kw)
    return request.current_route_path(_query=query_args)
2035 2040
2036 2041
def curl_api_example(method, args):
    """Build a ready-to-paste curl invocation for the JSON-RPC API endpoint."""
    payload = OrderedDict([
        ('id', 1),
        ('auth_token', 'SECRET'),
        ('method', method),
        ('args', args)
    ])
    return "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{args_json}'".format(
        api_url=route_url('apiv2'),
        args_json=json.dumps(payload)
    )
2049 2054
2050 2055
def api_call_example(method, args):
    """
    Generates an API call example via CURL
    """
    hint = (
        "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
        "and needs to be of `api calls` role."
        .format(token_url=route_url('my_account_auth_tokens')))
    return literal(curl_api_example(method, args) + hint)
2062 2067
2063 2068
def notification_description(notification, request):
    """
    Generate notification human readable description based on notification type
    """
    from rhodecode.model.notification import NotificationModel
    model = NotificationModel()
    return model.make_description(notification, translate=request.translate)
2071 2076
2072 2077
def go_import_header(request, db_repo=None):
    """
    Creates a header for go-import functionality in Go Lang
    """
    # only emitted for an existing repo when the `go-get` flag is present
    if not db_repo:
        return
    if 'go-get' not in request.GET:
        return

    clone_url = db_repo.clone_url()
    # strip the scheme to build the import prefix
    prefix = re.split(r'^https?:\/\/', clone_url)[-1]
    return literal('<meta name="go-import" content="{} {} {}">'.format(
        prefix, db_repo.repo_type, clone_url))
2088 2093
2089 2094
def reviewer_as_json(*args, **kwargs):
    """Thin proxy to :func:`rhodecode.apps.repository.utils.reviewer_as_json`."""
    from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
    return _reviewer_as_json(*args, **kwargs)
2093 2098
2094 2099
def get_repo_view_type(request):
    """Map the matched route name to a coarse repo view type, or None."""
    route_to_view_type = {
        'repo_changelog': 'commits',
        'repo_commits': 'commits',
        'repo_files': 'files',
        'repo_summary': 'summary',
        'repo_commit': 'commit'
    }
    return route_to_view_type.get(request.matched_route.name)
2106 2111
2107 2112
def is_active(menu_entry, selected):
    """
    Returns active class for selecting menus in templates
    <li class=${h.is_active('settings', current_active)}></li>
    """
    entries = menu_entry if isinstance(menu_entry, list) else [menu_entry]
    if selected in entries:
        return "active"
2123
2124
class IssuesRegistry(object):
    """
    Accumulates issues reported by the issue-extraction callback.

    issue_registry = IssuesRegistry()
    some_func(issues_callback=issues_registry(...))
    """

    def __init__(self):
        self.issues = []
        self.unique_issues = collections.defaultdict(lambda: [])

    def __call__(self, commit_dict=None):
        # returns a per-commit callback that records each extracted issue
        def callback(issue):
            if commit_dict and issue:
                issue['commit'] = commit_dict
            self.issues.append(issue)
            self.unique_issues[issue['id']].append(issue)
        return callback

    def get_issues(self):
        """All recorded issues, in extraction order."""
        return self.issues

    @property
    def issues_unique_count(self):
        """Number of distinct issue ids seen so far."""
        return len(set(i['id'] for i in self.issues))
@@ -1,267 +1,280 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import gzip
22 22 import shutil
23 23 import logging
24 24 import tempfile
25 25 import urlparse
26 26
27 27 from webob.exc import HTTPNotFound
28 28
29 29 import rhodecode
30 30 from rhodecode.lib.middleware.appenlight import wrap_in_appenlight_if_enabled
31 31 from rhodecode.lib.middleware.simplegit import SimpleGit, GIT_PROTO_PAT
32 32 from rhodecode.lib.middleware.simplehg import SimpleHg
33 33 from rhodecode.lib.middleware.simplesvn import SimpleSvn
34 34 from rhodecode.model.settings import VcsSettingsModel
35 35
36 36 log = logging.getLogger(__name__)
37 37
38 38 VCS_TYPE_KEY = '_rc_vcs_type'
39 39 VCS_TYPE_SKIP = '_rc_vcs_skip'
40 40
41 41
def is_git(environ):
    """
    Returns True if requests should be handled by GIT wsgi middleware
    """
    path = environ['PATH_INFO']
    # truthy match object when the path looks like git smart-http traffic
    git_match = GIT_PROTO_PAT.match(path)
    log.debug(
        'request path: `%s` detected as GIT PROTOCOL %s', path,
        git_match is not None)

    return git_match
52 52
53 53
def is_hg(environ):
    """
    Returns True if requests target is mercurial server - header
    ``HTTP_ACCEPT`` of such request would start with ``application/mercurial``.
    """
    hg_detected = False

    http_accept = environ.get('HTTP_ACCEPT')
    if http_accept and http_accept.startswith('application/mercurial'):
        # hg protocol requests always carry a `cmd` query argument
        query = urlparse.parse_qs(environ['QUERY_STRING'])
        hg_detected = 'cmd' in query

    log.debug(
        'request path: `%s` detected as HG PROTOCOL %s', environ['PATH_INFO'],
        hg_detected)

    return hg_detected
73 73
74 74
def is_svn(environ):
    """
    Returns True if requests target is Subversion server
    """
    http_dav = environ.get('HTTP_DAV', '')
    magic_path_segment = rhodecode.CONFIG.get(
        'rhodecode_subversion_magic_path', '/!svn')
    # DAV header, the magic /!svn path segment, or WebDAV-only verbs all
    # identify svn traffic
    svn_detected = (
        'subversion' in http_dav
        or magic_path_segment in environ['PATH_INFO']
        or environ['REQUEST_METHOD'] in ['PROPFIND', 'PROPPATCH']
    )
    log.debug(
        'request path: `%s` detected as SVN PROTOCOL %s', environ['PATH_INFO'],
        svn_detected)

    return svn_detected
93 93
94 94
class GunzipMiddleware(object):
    """
    WSGI middleware that unzips gzip-encoded requests before
    passing on to the underlying application.
    """

    def __init__(self, application):
        self.app = application

    def __call__(self, environ, start_response):
        content_encoding = environ.get('HTTP_CONTENT_ENCODING', b'')

        if b'gzip' not in content_encoding:
            log.debug('content not gzipped, gzipMiddleware passing '
                      'request further')
            return self.app(environ, start_response)

        log.debug('gzip detected, now running gunzip wrapper')
        wsgi_input = environ['wsgi.input']

        if not hasattr(environ['wsgi.input'], 'seek'):
            # The gzip implementation in the standard library of Python 2.x
            # requires the '.seek()' and '.tell()' methods to be available
            # on the input stream. Read the data into a temporary file to
            # work around this limitation.
            wsgi_input = tempfile.SpooledTemporaryFile(64 * 1024 * 1024)
            shutil.copyfileobj(environ['wsgi.input'], wsgi_input)
            wsgi_input.seek(0)

        environ['wsgi.input'] = gzip.GzipFile(fileobj=wsgi_input, mode='r')
        # since we "Ungzipped" the content we say now it's no longer gzip
        # content encoding
        del environ['HTTP_CONTENT_ENCODING']

        # the decompressed body no longer matches the declared length
        if 'CONTENT_LENGTH' in environ:
            del environ['CONTENT_LENGTH']

        return self.app(environ, start_response)
133 133
134 134
def is_vcs_call(environ):
    """True when a vcs handler type was stamped into `environ` and is not the skip marker."""
    if VCS_TYPE_KEY not in environ:
        return False
    raw_type = environ[VCS_TYPE_KEY]
    return raw_type and raw_type != VCS_TYPE_SKIP
140 140
141 141
def get_path_elem(route_path):
    """First path segment of `route_path` (leading slashes ignored), or None."""
    if not route_path:
        return None

    stripped = route_path.lstrip('/')
    if not stripped:
        return None
    return stripped.split('/')[0]
152 152
153 153
def detect_vcs_request(environ, backends):
    """
    Pick the vcs handler class (hg/git/svn) for this request, or None.

    Resolution order: whitelist skip, explicit marker in `environ`, then
    protocol probing in the order given by `backends` (vcs.backends config).
    """
    checks = {
        'hg': (is_hg, SimpleHg),
        'git': (is_git, SimpleGit),
        'svn': (is_svn, SimpleSvn),
    }
    handler = None
    # List of path views first chunk we don't do any checks
    white_list = [
        # e.g /_file_store/download
        '_file_store',

        # static files no detection
        '_static',

        # full channelstream connect should be VCS skipped
        '_admin/channelstream/connect',
    ]

    path_info = environ['PATH_INFO']
    path_elem = get_path_elem(path_info)

    if path_elem in white_list:
        log.debug('path `%s` in whitelist, skipping...', path_info)
        return handler

    # some whitelist entries are full paths, not just the first segment
    path_url = path_info.lstrip('/')
    if path_url in white_list:
        log.debug('full url path `%s` in whitelist, skipping...', path_url)
        return handler

    if VCS_TYPE_KEY in environ:
        raw_type = environ[VCS_TYPE_KEY]
        if raw_type == VCS_TYPE_SKIP:
            log.debug('got `skip` marker for vcs detection, skipping...')
            return handler

        _check, handler = checks.get(raw_type) or [None, None]
        if handler:
            log.debug('got handler:%s from environ', handler)

    if not handler:
        log.debug('request start: checking if request for `%s` is of VCS type in order: %s', path_elem, backends)
        for vcs_type in backends:
            vcs_check, _handler = checks[vcs_type]
            if vcs_check(environ):
                log.debug('vcs handler found %s', _handler)
                handler = _handler
                break

    return handler
193 206
194 207
class VCSMiddleware(object):
    """
    WSGI middleware that routes vcs protocol traffic (git/hg/svn) to the
    matching scm handler app and all other requests to the wrapped application.
    """

    def __init__(self, app, registry, config, appenlight_client):
        self.application = app
        self.registry = registry
        self.config = config
        self.appenlight_client = appenlight_client
        self.use_gzip = True
        # order in which we check the middlewares, based on vcs.backends config
        self.check_middlewares = config['vcs.backends']

    def vcs_config(self, repo_name=None):
        """
        returns serialized VcsSettings
        """
        try:
            return VcsSettingsModel(
                repo=repo_name).get_ui_settings_as_config_obj()
        except Exception:
            # damaged/non-existent settings; the caller deals with None
            pass

    def wrap_in_gzip_if_enabled(self, app, config):
        # transparently gunzip request bodies when enabled
        if self.use_gzip:
            app = GunzipMiddleware(app)
        return app

    def _get_handler_app(self, environ):
        log.debug('VCSMiddleware: detecting vcs type.')
        handler = detect_vcs_request(environ, self.check_middlewares)
        if handler is None:
            return None
        return handler(self.config, self.registry)

    def __call__(self, environ, start_response):
        # check if we handle one of interesting protocols, optionally extract
        # specific vcsSettings and allow changes of how things are wrapped
        vcs_handler = self._get_handler_app(environ)
        if not vcs_handler:
            return self.application(environ, start_response)

        # translate the _REPO_ID into real repo NAME for usage
        # in middleware
        environ['PATH_INFO'] = vcs_handler._get_by_id(environ['PATH_INFO'])

        # Set acl, url and vcs repo names.
        vcs_handler.set_repo_names(environ)

        # register repo config back to the handler
        vcs_conf = self.vcs_config(vcs_handler.acl_repo_name)
        # maybe damaged/non existent settings. We still want to
        # pass that point to validate on is_valid_and_existing_repo
        # and return proper HTTP Code back to client
        if vcs_conf:
            vcs_handler.repo_vcs_config = vcs_conf

        # check for type, presence in database and on filesystem
        if not vcs_handler.is_valid_and_existing_repo(
                vcs_handler.acl_repo_name,
                vcs_handler.base_path,
                vcs_handler.SCM):
            return HTTPNotFound()(environ, start_response)

        environ['REPO_NAME'] = vcs_handler.url_repo_name

        # Wrap handler in middlewares if they are enabled.
        vcs_handler = self.wrap_in_gzip_if_enabled(
            vcs_handler, self.config)
        vcs_handler, _ = wrap_in_appenlight_if_enabled(
            vcs_handler, self.config, self.appenlight_client)

        return vcs_handler(environ, start_response)
@@ -1,1070 +1,1074 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Some simple helper functions
24 24 """
25 25
26 26 import collections
27 27 import datetime
28 28 import dateutil.relativedelta
29 29 import hashlib
30 30 import logging
31 31 import re
32 32 import sys
33 33 import time
34 34 import urllib
35 35 import urlobject
36 36 import uuid
37 37 import getpass
38 38 from functools import update_wrapper, partial
39 39
40 40 import pygments.lexers
41 41 import sqlalchemy
42 42 import sqlalchemy.engine.url
43 43 import sqlalchemy.exc
44 44 import sqlalchemy.sql
45 45 import webob
46 46 import pyramid.threadlocal
47 47 from pyramid import compat
48 48 from pyramid.settings import asbool
49 49
50 50 import rhodecode
51 51 from rhodecode.translation import _, _pluralize
52 52
53 53
def md5(s):
    """Hex MD5 digest of byte string `s`."""
    return hashlib.md5(s).hexdigest()
56 56
57 57
def md5_safe(s):
    """Hex MD5 digest of `s`, coercing it to a byte string first via safe_str."""
    return md5(safe_str(s))
60 60
61 61
def sha1(s):
    """Hex SHA-1 digest of byte string `s`."""
    return hashlib.sha1(s).hexdigest()
64 64
65 65
def sha1_safe(s):
    """Hex SHA-1 digest of `s`, coercing it to a byte string first via safe_str."""
    return sha1(safe_str(s))
68 68
69 69
def __get_lem(extra_mapping=None):
    """
    Get language extension map based on what's inside pygments lexers
    """
    ext_to_langs = collections.defaultdict(lambda: [])

    def __clean(s):
        # patterns look like '*.py' or '*.php[345]'; strip glob decoration
        s = s.lstrip('*')
        s = s.lstrip('.')

        if s.find('[') != -1:
            # expand the character-class suffix into concrete extensions
            exts = []
            start, stop = s.find('['), s.find(']')
            for suffix in s[start + 1:stop]:
                exts.append(s[:s.find('[')] + suffix)
            return [e.lower() for e in exts]
        else:
            return [s.lower()]

    for lexer_name, lexer_info in sorted(pygments.lexers.LEXERS.items()):
        # lexer_info[-2] is the tuple of filename glob patterns
        desc = lexer_name.replace('Lexer', '')
        for pattern in lexer_info[-2]:
            for ext in __clean(pattern):
                ext_to_langs[ext].append(desc)

    data = dict(ext_to_langs)

    extra_mapping = extra_mapping or {}
    if extra_mapping:
        for k, v in extra_mapping.items():
            if k not in data:
                # register new mapping2lexer
                data[k] = [v]

    return data
108 108
109 109
def str2bool(_str):
    """
    returns True/False value from given string, it tries to translate the
    string into boolean

    :param _str: string value to translate into boolean
    :rtype: boolean
    :returns: boolean from given string
    """
    if _str is None:
        return False
    # bug fix: `_str in (True, False)` also matched ints 1/0 via equality and
    # returned the raw int; an isinstance check keeps the return a real bool
    if isinstance(_str, bool):
        return _str
    _str = str(_str).strip().lower()
    return _str in ('t', 'true', 'y', 'yes', 'on', '1')
125 125
126 126
def aslist(obj, sep=None, strip=True):
    """
    Returns given string separated by sep as list

    :param obj:
    :param sep:
    :param strip:
    """
    if isinstance(obj, (basestring,)):
        parts = obj.split(sep)
        return [chunk.strip() for chunk in parts] if strip else parts
    if isinstance(obj, (list, tuple)):
        return obj
    if obj is None:
        return []
    return [obj]
146 146
147 147
def convert_line_endings(line, mode):
    """
    Converts a given line "line end" accordingly to given mode

    Available modes are::
        0 - Unix
        1 - Mac
        2 - DOS

    :param line: given line to convert
    :param mode: mode to convert to
    :rtype: str
    :return: converted line according to mode
    """
    if mode == 0:
        # DOS then Mac endings collapse to Unix
        return line.replace('\r\n', '\n').replace('\r', '\n')
    if mode == 1:
        return line.replace('\r\n', '\r').replace('\n', '\r')
    if mode == 2:
        # only convert bare \r or bare \n, leave existing \r\n untouched
        return re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
    return line
171 171
172 172
def detect_mode(line, default):
    """
    Detects line break for given line, if line break couldn't be found
    given default value is returned

    :param line: str line
    :param default: default
    :rtype: int
    :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
    """
    # check \r\n before \n: a DOS line also ends with \n
    if line.endswith('\r\n'):
        return 2
    if line.endswith('\n'):
        return 0
    if line.endswith('\r'):
        return 1
    return default
191 191
192 192
def safe_int(val, default=None):
    """
    Returns int() of val if val is not convertable to int use default
    instead

    :param val:
    :param default:
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        return default
208 208
209 209
def safe_unicode(str_, from_encoding=None, use_chardet=False):
    """
    safe unicode function. Does few trick to turn str_ into unicode

    In case of UnicodeDecode error, we try to return it with encoding detected
    by chardet library if it fails fallback to unicode with errors replaced

    :param str_: string to decode
    :rtype: unicode
    :returns: unicode object
    """
    if isinstance(str_, unicode):
        return str_

    if not from_encoding:
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        from_encoding = DEFAULT_ENCODINGS

    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    # first attempt: plain conversion with the system default encoding
    try:
        return unicode(str_)
    except UnicodeDecodeError:
        pass

    # then each configured candidate encoding, in order
    for candidate in from_encoding:
        try:
            return unicode(str_, candidate)
        except UnicodeDecodeError:
            pass

    # optionally let chardet guess the encoding
    if use_chardet:
        try:
            import chardet
            detected = chardet.detect(str_)['encoding']
            if detected is None:
                raise Exception()
            return str_.decode(detected)
        except (ImportError, UnicodeDecodeError, Exception):
            pass

    # last resort: decode with replacement characters
    return unicode(str_, from_encoding[0], 'replace')
254 254
def safe_str(unicode_, to_encoding=None, use_chardet=False):
    """
    safe str function. Does few trick to turn unicode_ into string

    In case of UnicodeEncodeError, we try to return it with encoding detected
    by chardet library if it fails fallback to string with errors replaced

    :param unicode_: unicode to encode
    :rtype: str
    :returns: str object
    """

    # if it's not basestr cast to str
    if not isinstance(unicode_, compat.string_types):
        return str(unicode_)

    if isinstance(unicode_, str):
        return unicode_

    if not to_encoding:
        DEFAULT_ENCODINGS = aslist(rhodecode.CONFIG.get('default_encoding',
                                                        'utf8'), sep=',')
        to_encoding = DEFAULT_ENCODINGS

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    if use_chardet:
        try:
            import chardet
            encoding = chardet.detect(unicode_)['encoding']
            if encoding is not None:
                return unicode_.encode(encoding)
        except (ImportError, UnicodeEncodeError):
            pass
        # bug fix: previously this path did `raise UnicodeEncodeError()`,
        # which needs 5 constructor args and therefore raised a TypeError
        # that escaped the except clause instead of falling back to 'replace'

    return unicode_.encode(to_encoding[0], 'replace')
300 300
301 301
def remove_suffix(s, suffix):
    """
    Return `s` with a single trailing `suffix` removed.

    Guards against an empty suffix: ``s[:-len('')]`` evaluates to ``s[:0]``
    and previously truncated the whole string to ``''``.
    """
    if suffix and s.endswith(suffix):
        s = s[:len(s) - len(suffix)]
    return s
306 306
307 307
def remove_prefix(s, prefix):
    """Return `s` with a single leading `prefix` removed."""
    return s[len(prefix):] if s.startswith(prefix) else s
312 312
313 313
def find_calling_context(ignore_modules=None):
    """
    Look through the calling stack and return the frame which called
    this function and is part of core module ( ie. rhodecode.* )

    :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
    """
    ignore_modules = ignore_modules or []
    # top-level package name of this module, e.g. 'rhodecode'
    top_level = __name__.split('.')[0]

    frame = sys._getframe(2)
    while frame.f_back is not None:
        module_name = frame.f_globals.get('__name__')
        if module_name and module_name.startswith(top_level):
            if module_name not in ignore_modules:
                return frame
        frame = frame.f_back
    return None
332 332
333 333
def ping_connection(connection, branch):
    """
    SQLAlchemy ``engine_connect`` listener: validate pooled connections with
    ``SELECT 1`` and transparently re-establish stale ones.
    """
    if branch:
        # "branch" refers to a sub-connection of a connection,
        # we don't want to bother pinging on these.
        return

    # turn off "close with result". This flag is only used with
    # "connectionless" execution, otherwise will be False in any case
    close_flag = connection.should_close_with_result
    connection.should_close_with_result = False

    try:
        # run a SELECT 1. use a core select() so that
        # the SELECT of a scalar value without a table is
        # appropriately formatted for the backend
        connection.scalar(sqlalchemy.sql.select([1]))
    except sqlalchemy.exc.DBAPIError as err:
        # DBAPIError wraps the DBAPI's exception; .connection_invalidated
        # marks a "disconnect" condition, as determined by the dialect in use
        if not err.connection_invalidated:
            raise
        # run the same SELECT again - the connection will re-validate
        # itself and establish a new connection. The disconnect detection
        # here also causes the whole connection pool to be invalidated
        # so that all stale connections are discarded.
        connection.scalar(sqlalchemy.sql.select([1]))
    finally:
        # restore "close with result"
        connection.should_close_with_result = close_flag
367 367
368 368
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
    """Custom engine_from_config functions."""
    log = logging.getLogger('sqlalchemy.engine')
    use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
    debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))

    engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)

    def color_sql(sql):
        color_seq = '\033[1;33m'  # This is yellow: code 33
        normal = '\x1b[0m'
        return ''.join([color_seq, sql, normal])

    if use_ping_connection:
        log.debug('Adding ping_connection on the engine config.')
        sqlalchemy.event.listen(engine, "engine_connect", ping_connection)

    if debug:
        # attach events only for debug configuration
        def before_cursor_execute(conn, cursor, statement,
                                  parameters, context, executemany):
            # stamp the start time so after_cursor_execute can clean it up
            setattr(conn, 'query_start_time', time.time())
            log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
            calling_context = find_calling_context(ignore_modules=[
                'rhodecode.lib.caching_query',
                'rhodecode.model.settings',
            ])
            if calling_context:
                log.info(color_sql('call context %s:%s' % (
                    calling_context.f_code.co_filename,
                    calling_context.f_lineno,
                )))

        def after_cursor_execute(conn, cursor, statement,
                                 parameters, context, executemany):
            delattr(conn, 'query_start_time')

        sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
        sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)

    return engine
410 410
411 411
def get_encryption_key(config):
    """Secret used for encrypted values; falls back to the beaker session secret."""
    # the beaker secret is looked up eagerly: its absence is a config error
    fallback = config['beaker.session.secret']
    dedicated = config.get('rhodecode.encrypted_values.secret')
    return dedicated or fallback
416 416
417 417
418 418 def age(prevdate, now=None, show_short_version=False, show_suffix=True,
419 419 short_format=False):
420 420 """
421 421 Turns a datetime into an age string.
422 422 If show_short_version is True, this generates a shorter string with
423 423 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
424 424
425 425 * IMPORTANT*
426 426 Code of this function is written in special way so it's easier to
427 427 backport it to javascript. If you mean to update it, please also update
428 428 `jquery.timeago-extension.js` file
429 429
430 430 :param prevdate: datetime object
431 431 :param now: get current time, if not define we use
432 432 `datetime.datetime.now()`
433 433 :param show_short_version: if it should approximate the date and
434 434 return a shorter string
435 435 :param show_suffix:
436 436 :param short_format: show short format, eg 2D instead of 2 days
437 437 :rtype: unicode
438 438 :returns: unicode words describing age
439 439 """
440 440
441 441 def _get_relative_delta(now, prevdate):
442 442 base = dateutil.relativedelta.relativedelta(now, prevdate)
443 443 return {
444 444 'year': base.years,
445 445 'month': base.months,
446 446 'day': base.days,
447 447 'hour': base.hours,
448 448 'minute': base.minutes,
449 449 'second': base.seconds,
450 450 }
451 451
452 452 def _is_leap_year(year):
453 453 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
454 454
455 455 def get_month(prevdate):
456 456 return prevdate.month
457 457
458 458 def get_year(prevdate):
459 459 return prevdate.year
460 460
461 461 now = now or datetime.datetime.now()
462 462 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
463 463 deltas = {}
464 464 future = False
465 465
466 466 if prevdate > now:
467 467 now_old = now
468 468 now = prevdate
469 469 prevdate = now_old
470 470 future = True
471 471 if future:
472 472 prevdate = prevdate.replace(microsecond=0)
473 473 # Get date parts deltas
474 474 for part in order:
475 475 rel_delta = _get_relative_delta(now, prevdate)
476 476 deltas[part] = rel_delta[part]
477 477
478 478 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
479 479 # not 1 hour, -59 minutes and -59 seconds)
480 480 offsets = [[5, 60], [4, 60], [3, 24]]
481 481 for element in offsets: # seconds, minutes, hours
482 482 num = element[0]
483 483 length = element[1]
484 484
485 485 part = order[num]
486 486 carry_part = order[num - 1]
487 487
488 488 if deltas[part] < 0:
489 489 deltas[part] += length
490 490 deltas[carry_part] -= 1
491 491
492 492 # Same thing for days except that the increment depends on the (variable)
493 493 # number of days in the month
494 494 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
495 495 if deltas['day'] < 0:
496 496 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
497 497 deltas['day'] += 29
498 498 else:
499 499 deltas['day'] += month_lengths[get_month(prevdate) - 1]
500 500
501 501 deltas['month'] -= 1
502 502
503 503 if deltas['month'] < 0:
504 504 deltas['month'] += 12
505 505 deltas['year'] -= 1
506 506
507 507 # Format the result
508 508 if short_format:
509 509 fmt_funcs = {
510 510 'year': lambda d: u'%dy' % d,
511 511 'month': lambda d: u'%dm' % d,
512 512 'day': lambda d: u'%dd' % d,
513 513 'hour': lambda d: u'%dh' % d,
514 514 'minute': lambda d: u'%dmin' % d,
515 515 'second': lambda d: u'%dsec' % d,
516 516 }
517 517 else:
518 518 fmt_funcs = {
519 519 'year': lambda d: _pluralize(u'${num} year', u'${num} years', d, mapping={'num': d}).interpolate(),
520 520 'month': lambda d: _pluralize(u'${num} month', u'${num} months', d, mapping={'num': d}).interpolate(),
521 521 'day': lambda d: _pluralize(u'${num} day', u'${num} days', d, mapping={'num': d}).interpolate(),
522 522 'hour': lambda d: _pluralize(u'${num} hour', u'${num} hours', d, mapping={'num': d}).interpolate(),
523 523 'minute': lambda d: _pluralize(u'${num} minute', u'${num} minutes', d, mapping={'num': d}).interpolate(),
524 524 'second': lambda d: _pluralize(u'${num} second', u'${num} seconds', d, mapping={'num': d}).interpolate(),
525 525 }
526 526
527 527 i = 0
528 528 for part in order:
529 529 value = deltas[part]
530 530 if value != 0:
531 531
532 532 if i < 5:
533 533 sub_part = order[i + 1]
534 534 sub_value = deltas[sub_part]
535 535 else:
536 536 sub_value = 0
537 537
538 538 if sub_value == 0 or show_short_version:
539 539 _val = fmt_funcs[part](value)
540 540 if future:
541 541 if show_suffix:
542 542 return _(u'in ${ago}', mapping={'ago': _val})
543 543 else:
544 544 return _(_val)
545 545
546 546 else:
547 547 if show_suffix:
548 548 return _(u'${ago} ago', mapping={'ago': _val})
549 549 else:
550 550 return _(_val)
551 551
552 552 val = fmt_funcs[part](value)
553 553 val_detail = fmt_funcs[sub_part](sub_value)
554 554 mapping = {'val': val, 'detail': val_detail}
555 555
556 556 if short_format:
557 557 datetime_tmpl = _(u'${val}, ${detail}', mapping=mapping)
558 558 if show_suffix:
559 559 datetime_tmpl = _(u'${val}, ${detail} ago', mapping=mapping)
560 560 if future:
561 561 datetime_tmpl = _(u'in ${val}, ${detail}', mapping=mapping)
562 562 else:
563 563 datetime_tmpl = _(u'${val} and ${detail}', mapping=mapping)
564 564 if show_suffix:
565 565 datetime_tmpl = _(u'${val} and ${detail} ago', mapping=mapping)
566 566 if future:
567 567 datetime_tmpl = _(u'in ${val} and ${detail}', mapping=mapping)
568 568
569 569 return datetime_tmpl
570 570 i += 1
571 571 return _(u'just now')
572 572
573 573
574 574 def age_from_seconds(seconds):
575 575 seconds = safe_int(seconds) or 0
576 576 prevdate = time_to_datetime(time.time() + seconds)
577 577 return age(prevdate, show_suffix=False, show_short_version=True)
578 578
579 579
580 580 def cleaned_uri(uri):
581 581 """
582 582 Quotes '[' and ']' from uri if there is only one of them.
583 583 According to RFC3986 we cannot use such chars in a uri
584 584 :param uri:
585 585 :return: uri without these chars
586 586 """
587 587 return urllib.quote(uri, safe='@$:/')
588 588
589 589
590 590 def credentials_filter(uri):
591 591 """
592 592 Returns a url with removed credentials
593 593
594 594 :param uri:
595 595 """
596 596 import urlobject
597 if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
598 return 'InvalidDecryptionKey'
599
597 600 url_obj = urlobject.URLObject(cleaned_uri(uri))
598 601 url_obj = url_obj.without_password().without_username()
599 602
600 603 return url_obj
601 604
602 605
603 606 def get_host_info(request):
604 607 """
605 608 Generate host info, to obtain full url e.g https://server.com
606 609 use this
607 610 `{scheme}://{netloc}`
608 611 """
609 612 if not request:
610 613 return {}
611 614
612 615 qualified_home_url = request.route_url('home')
613 616 parsed_url = urlobject.URLObject(qualified_home_url)
614 617 decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
615 618
616 619 return {
617 620 'scheme': parsed_url.scheme,
618 621 'netloc': parsed_url.netloc+decoded_path,
619 622 'hostname': parsed_url.hostname,
620 623 }
621 624
622 625
623 626 def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
624 627 qualified_home_url = request.route_url('home')
625 628 parsed_url = urlobject.URLObject(qualified_home_url)
626 629 decoded_path = safe_unicode(urllib.unquote(parsed_url.path.rstrip('/')))
627 630
628 631 args = {
629 632 'scheme': parsed_url.scheme,
630 633 'user': '',
631 634 'sys_user': getpass.getuser(),
632 635 # path if we use proxy-prefix
633 636 'netloc': parsed_url.netloc+decoded_path,
634 637 'hostname': parsed_url.hostname,
635 638 'prefix': decoded_path,
636 639 'repo': repo_name,
637 640 'repoid': str(repo_id),
638 641 'repo_type': repo_type
639 642 }
640 643 args.update(override)
641 644 args['user'] = urllib.quote(safe_str(args['user']))
642 645
643 646 for k, v in args.items():
644 647 uri_tmpl = uri_tmpl.replace('{%s}' % k, v)
645 648
646 649 # special case for SVN clone url
647 650 if repo_type == 'svn':
648 651 uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')
649 652
650 653 # remove leading @ sign if it's present. Case of empty user
651 654 url_obj = urlobject.URLObject(uri_tmpl)
652 655 url = url_obj.with_netloc(url_obj.netloc.lstrip('@'))
653 656
654 657 return safe_unicode(url)
655 658
656 659
657 660 def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
658 maybe_unreachable=False):
661 maybe_unreachable=False, reference_obj=None):
659 662 """
660 663 Safe version of get_commit. If this commit doesn't exist for a
661 664 repository, it returns a Dummy one instead
662 665
663 666 :param repo: repository instance
664 667 :param commit_id: commit id as str
665 668 :param commit_idx: numeric commit index
666 669 :param pre_load: optional list of commit attributes to load
667 670 :param maybe_unreachable: translate unreachable commits on git repos
671 :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
668 672 """
669 673 # TODO(skreft): remove these circular imports
670 674 from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
671 675 from rhodecode.lib.vcs.exceptions import RepositoryError
672 676 if not isinstance(repo, BaseRepository):
673 677 raise Exception('You must pass an Repository '
674 678 'object as first argument got %s', type(repo))
675 679
676 680 try:
677 681 commit = repo.get_commit(
678 682 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
679 maybe_unreachable=maybe_unreachable)
683 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
680 684 except (RepositoryError, LookupError):
681 685 commit = EmptyCommit()
682 686 return commit
683 687
684 688
685 689 def datetime_to_time(dt):
686 690 if dt:
687 691 return time.mktime(dt.timetuple())
688 692
689 693
690 694 def time_to_datetime(tm):
691 695 if tm:
692 696 if isinstance(tm, compat.string_types):
693 697 try:
694 698 tm = float(tm)
695 699 except ValueError:
696 700 return
697 701 return datetime.datetime.fromtimestamp(tm)
698 702
699 703
700 704 def time_to_utcdatetime(tm):
701 705 if tm:
702 706 if isinstance(tm, compat.string_types):
703 707 try:
704 708 tm = float(tm)
705 709 except ValueError:
706 710 return
707 711 return datetime.datetime.utcfromtimestamp(tm)
708 712
709 713
710 714 MENTIONS_REGEX = re.compile(
711 715 # ^@ or @ without any special chars in front
712 716 r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
713 717 # main body starts with letter, then can be . - _
714 718 r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
715 719 re.VERBOSE | re.MULTILINE)
716 720
717 721
718 722 def extract_mentioned_users(s):
719 723 """
720 724 Returns unique usernames from given string s that have @mention
721 725
722 726 :param s: string to get mentions
723 727 """
724 728 usrs = set()
725 729 for username in MENTIONS_REGEX.findall(s):
726 730 usrs.add(username)
727 731
728 732 return sorted(list(usrs), key=lambda k: k.lower())
729 733
730 734
731 735 class AttributeDictBase(dict):
732 736 def __getstate__(self):
733 737 odict = self.__dict__ # get attribute dictionary
734 738 return odict
735 739
736 740 def __setstate__(self, dict):
737 741 self.__dict__ = dict
738 742
739 743 __setattr__ = dict.__setitem__
740 744 __delattr__ = dict.__delitem__
741 745
742 746
743 747 class StrictAttributeDict(AttributeDictBase):
744 748 """
745 749 Strict Version of Attribute dict which raises an Attribute error when
746 750 requested attribute is not set
747 751 """
748 752 def __getattr__(self, attr):
749 753 try:
750 754 return self[attr]
751 755 except KeyError:
752 756 raise AttributeError('%s object has no attribute %s' % (
753 757 self.__class__, attr))
754 758
755 759
756 760 class AttributeDict(AttributeDictBase):
757 761 def __getattr__(self, attr):
758 762 return self.get(attr, None)
759 763
760 764
761 765
762 766 class OrderedDefaultDict(collections.OrderedDict, collections.defaultdict):
763 767 def __init__(self, default_factory=None, *args, **kwargs):
764 768 # in python3 you can omit the args to super
765 769 super(OrderedDefaultDict, self).__init__(*args, **kwargs)
766 770 self.default_factory = default_factory
767 771
768 772
769 773 def fix_PATH(os_=None):
770 774 """
771 775 Get current active python path, and append it to PATH variable to fix
772 776 issues of subprocess calls and different python versions
773 777 """
774 778 if os_ is None:
775 779 import os
776 780 else:
777 781 os = os_
778 782
779 783 cur_path = os.path.split(sys.executable)[0]
780 784 if not os.environ['PATH'].startswith(cur_path):
781 785 os.environ['PATH'] = '%s:%s' % (cur_path, os.environ['PATH'])
782 786
783 787
784 788 def obfuscate_url_pw(engine):
785 789 _url = engine or ''
786 790 try:
787 791 _url = sqlalchemy.engine.url.make_url(engine)
788 792 if _url.password:
789 793 _url.password = 'XXXXX'
790 794 except Exception:
791 795 pass
792 796 return unicode(_url)
793 797
794 798
795 799 def get_server_url(environ):
796 800 req = webob.Request(environ)
797 801 return req.host_url + req.script_name
798 802
799 803
800 804 def unique_id(hexlen=32):
801 805 alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
802 806 return suuid(truncate_to=hexlen, alphabet=alphabet)
803 807
804 808
805 809 def suuid(url=None, truncate_to=22, alphabet=None):
806 810 """
807 811 Generate and return a short URL safe UUID.
808 812
809 813 If the url parameter is provided, set the namespace to the provided
810 814 URL and generate a UUID.
811 815
812 816 :param url: the url to get the uuid for
813 817 :param truncate_to: truncate the basic 22 UUID to a shorter version
814 818
815 819 The IDs won't be universally unique any longer, but the probability of
816 820 a collision will still be very low.
817 821 """
818 822 # Define our alphabet.
819 823 _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"
820 824
821 825 # If no URL is given, generate a random UUID.
822 826 if url is None:
823 827 unique_id = uuid.uuid4().int
824 828 else:
825 829 unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int
826 830
827 831 alphabet_length = len(_ALPHABET)
828 832 output = []
829 833 while unique_id > 0:
830 834 digit = unique_id % alphabet_length
831 835 output.append(_ALPHABET[digit])
832 836 unique_id = int(unique_id / alphabet_length)
833 837 return "".join(output)[:truncate_to]
834 838
835 839
836 840 def get_current_rhodecode_user(request=None):
837 841 """
838 842 Gets rhodecode user from request
839 843 """
840 844 pyramid_request = request or pyramid.threadlocal.get_current_request()
841 845
842 846 # web case
843 847 if pyramid_request and hasattr(pyramid_request, 'user'):
844 848 return pyramid_request.user
845 849
846 850 # api case
847 851 if pyramid_request and hasattr(pyramid_request, 'rpc_user'):
848 852 return pyramid_request.rpc_user
849 853
850 854 return None
851 855
852 856
853 857 def action_logger_generic(action, namespace=''):
854 858 """
855 859 A generic logger for actions useful to the system overview, tries to find
856 860 an acting user for the context of the call otherwise reports unknown user
857 861
858 862 :param action: logging message eg 'comment 5 deleted'
859 863 :param type: string
860 864
861 865 :param namespace: namespace of the logging message eg. 'repo.comments'
862 866 :param type: string
863 867
864 868 """
865 869
866 870 logger_name = 'rhodecode.actions'
867 871
868 872 if namespace:
869 873 logger_name += '.' + namespace
870 874
871 875 log = logging.getLogger(logger_name)
872 876
873 877 # get a user if we can
874 878 user = get_current_rhodecode_user()
875 879
876 880 logfunc = log.info
877 881
878 882 if not user:
879 883 user = '<unknown user>'
880 884 logfunc = log.warning
881 885
882 886 logfunc('Logging action by {}: {}'.format(user, action))
883 887
884 888
885 889 def escape_split(text, sep=',', maxsplit=-1):
886 890 r"""
887 891 Allows for escaping of the separator: e.g. arg='foo\, bar'
888 892
889 893 It should be noted that the way bash et. al. do command line parsing, those
890 894 single quotes are required.
891 895 """
892 896 escaped_sep = r'\%s' % sep
893 897
894 898 if escaped_sep not in text:
895 899 return text.split(sep, maxsplit)
896 900
897 901 before, _mid, after = text.partition(escaped_sep)
898 902 startlist = before.split(sep, maxsplit) # a regular split is fine here
899 903 unfinished = startlist[-1]
900 904 startlist = startlist[:-1]
901 905
902 906 # recurse because there may be more escaped separators
903 907 endlist = escape_split(after, sep, maxsplit)
904 908
905 909 # finish building the escaped value. we use endlist[0] because the first
906 910 # part of the string sent in recursion is the rest of the escaped value.
907 911 unfinished += sep + endlist[0]
908 912
909 913 return startlist + [unfinished] + endlist[1:] # put together all the parts
910 914
911 915
912 916 class OptionalAttr(object):
913 917 """
914 918 Special Optional Option that defines other attribute. Example::
915 919
916 920 def test(apiuser, userid=Optional(OAttr('apiuser'))):
917 921 user = Optional.extract(userid)
918 922 # calls
919 923
920 924 """
921 925
922 926 def __init__(self, attr_name):
923 927 self.attr_name = attr_name
924 928
925 929 def __repr__(self):
926 930 return '<OptionalAttr:%s>' % self.attr_name
927 931
928 932 def __call__(self):
929 933 return self
930 934
931 935
932 936 # alias
933 937 OAttr = OptionalAttr
934 938
935 939
936 940 class Optional(object):
937 941 """
938 942 Defines an optional parameter::
939 943
940 944 param = param.getval() if isinstance(param, Optional) else param
941 945 param = param() if isinstance(param, Optional) else param
942 946
943 947 is equivalent of::
944 948
945 949 param = Optional.extract(param)
946 950
947 951 """
948 952
949 953 def __init__(self, type_):
950 954 self.type_ = type_
951 955
952 956 def __repr__(self):
953 957 return '<Optional:%s>' % self.type_.__repr__()
954 958
955 959 def __call__(self):
956 960 return self.getval()
957 961
958 962 def getval(self):
959 963 """
960 964 returns value from this Optional instance
961 965 """
962 966 if isinstance(self.type_, OAttr):
963 967 # use params name
964 968 return self.type_.attr_name
965 969 return self.type_
966 970
967 971 @classmethod
968 972 def extract(cls, val):
969 973 """
970 974 Extracts value from Optional() instance
971 975
972 976 :param val:
973 977 :return: original value if it's not Optional instance else
974 978 value of instance
975 979 """
976 980 if isinstance(val, cls):
977 981 return val.getval()
978 982 return val
979 983
980 984
981 985 def glob2re(pat):
982 986 """
983 987 Translate a shell PATTERN to a regular expression.
984 988
985 989 There is no way to quote meta-characters.
986 990 """
987 991
988 992 i, n = 0, len(pat)
989 993 res = ''
990 994 while i < n:
991 995 c = pat[i]
992 996 i = i+1
993 997 if c == '*':
994 998 #res = res + '.*'
995 999 res = res + '[^/]*'
996 1000 elif c == '?':
997 1001 #res = res + '.'
998 1002 res = res + '[^/]'
999 1003 elif c == '[':
1000 1004 j = i
1001 1005 if j < n and pat[j] == '!':
1002 1006 j = j+1
1003 1007 if j < n and pat[j] == ']':
1004 1008 j = j+1
1005 1009 while j < n and pat[j] != ']':
1006 1010 j = j+1
1007 1011 if j >= n:
1008 1012 res = res + '\\['
1009 1013 else:
1010 1014 stuff = pat[i:j].replace('\\','\\\\')
1011 1015 i = j+1
1012 1016 if stuff[0] == '!':
1013 1017 stuff = '^' + stuff[1:]
1014 1018 elif stuff[0] == '^':
1015 1019 stuff = '\\' + stuff
1016 1020 res = '%s[%s]' % (res, stuff)
1017 1021 else:
1018 1022 res = res + re.escape(c)
1019 1023 return res + '\Z(?ms)'
1020 1024
1021 1025
1022 1026 def parse_byte_string(size_str):
1023 1027 match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
1024 1028 if not match:
1025 1029 raise ValueError('Given size:%s is invalid, please make sure '
1026 1030 'to use format of <num>(MB|KB)' % size_str)
1027 1031
1028 1032 _parts = match.groups()
1029 1033 num, type_ = _parts
1030 1034 return long(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()]
1031 1035
1032 1036
1033 1037 class CachedProperty(object):
1034 1038 """
1035 1039 Lazy Attributes. With option to invalidate the cache by running a method
1036 1040
1037 1041 class Foo():
1038 1042
1039 1043 @CachedProperty
1040 1044 def heavy_func():
1041 1045 return 'super-calculation'
1042 1046
1043 1047 foo = Foo()
1044 1048 foo.heavy_func() # first computation
1045 1049 foo.heavy_func() # fetch from cache
1046 1050 foo._invalidate_prop_cache('heavy_func')
1047 1051 # at this point calling foo.heavy_func() will be re-computed
1048 1052 """
1049 1053
1050 1054 def __init__(self, func, func_name=None):
1051 1055
1052 1056 if func_name is None:
1053 1057 func_name = func.__name__
1054 1058 self.data = (func, func_name)
1055 1059 update_wrapper(self, func)
1056 1060
1057 1061 def __get__(self, inst, class_):
1058 1062 if inst is None:
1059 1063 return self
1060 1064
1061 1065 func, func_name = self.data
1062 1066 value = func(inst)
1063 1067 inst.__dict__[func_name] = value
1064 1068 if '_invalidate_prop_cache' not in inst.__dict__:
1065 1069 inst.__dict__['_invalidate_prop_cache'] = partial(
1066 1070 self._invalidate_prop_cache, inst)
1067 1071 return value
1068 1072
1069 1073 def _invalidate_prop_cache(self, inst, name):
1070 1074 inst.__dict__.pop(name, None)
@@ -1,1933 +1,1937 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from pyramid import compat
38 38
39 39 import rhodecode
40 40 from rhodecode.translation import lazy_ugettext
41 41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 42 from rhodecode.lib.vcs import connection
43 43 from rhodecode.lib.vcs.utils import author_name, author_email
44 44 from rhodecode.lib.vcs.conf import settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 50 RepositoryError)
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 FILEMODE_DEFAULT = 0o100644
57 57 FILEMODE_EXECUTABLE = 0o100755
58 58 EMPTY_COMMIT_ID = '0' * 40
59 59
60 60 _Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61 61
62 62
63 63 class Reference(_Reference):
64 64
65 65 @property
66 66 def branch(self):
67 67 if self.type == 'branch':
68 68 return self.name
69 69
70 70 @property
71 71 def bookmark(self):
72 72 if self.type == 'book':
73 73 return self.name
74 74
75 @property
76 def to_unicode(self):
77 return reference_to_unicode(self)
78
75 79
76 80 def unicode_to_reference(raw):
77 81 """
78 82 Convert a unicode (or string) to a reference object.
79 83 If unicode evaluates to False it returns None.
80 84 """
81 85 if raw:
82 86 refs = raw.split(':')
83 87 return Reference(*refs)
84 88 else:
85 89 return None
86 90
87 91
88 92 def reference_to_unicode(ref):
89 93 """
90 94 Convert a reference object to unicode.
91 95 If reference is None it returns None.
92 96 """
93 97 if ref:
94 98 return u':'.join(ref)
95 99 else:
96 100 return None
97 101
98 102
99 103 class MergeFailureReason(object):
100 104 """
101 105 Enumeration with all the reasons why the server side merge could fail.
102 106
103 107 DO NOT change the number of the reasons, as they may be stored in the
104 108 database.
105 109
106 110 Changing the name of a reason is acceptable and encouraged to deprecate old
107 111 reasons.
108 112 """
109 113
110 114 # Everything went well.
111 115 NONE = 0
112 116
113 117 # An unexpected exception was raised. Check the logs for more details.
114 118 UNKNOWN = 1
115 119
116 120 # The merge was not successful, there are conflicts.
117 121 MERGE_FAILED = 2
118 122
119 123 # The merge succeeded but we could not push it to the target repository.
120 124 PUSH_FAILED = 3
121 125
122 126 # The specified target is not a head in the target repository.
123 127 TARGET_IS_NOT_HEAD = 4
124 128
125 129 # The source repository contains more branches than the target. Pushing
126 130 # the merge will create additional branches in the target.
127 131 HG_SOURCE_HAS_MORE_BRANCHES = 5
128 132
129 133 # The target reference has multiple heads. That does not allow to correctly
130 134 # identify the target location. This could only happen for mercurial
131 135 # branches.
132 136 HG_TARGET_HAS_MULTIPLE_HEADS = 6
133 137
134 138 # The target repository is locked
135 139 TARGET_IS_LOCKED = 7
136 140
137 141 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
138 142 # A involved commit could not be found.
139 143 _DEPRECATED_MISSING_COMMIT = 8
140 144
141 145 # The target repo reference is missing.
142 146 MISSING_TARGET_REF = 9
143 147
144 148 # The source repo reference is missing.
145 149 MISSING_SOURCE_REF = 10
146 150
147 151 # The merge was not successful, there are conflicts related to sub
148 152 # repositories.
149 153 SUBREPO_MERGE_FAILED = 11
150 154
151 155
152 156 class UpdateFailureReason(object):
153 157 """
154 158 Enumeration with all the reasons why the pull request update could fail.
155 159
156 160 DO NOT change the number of the reasons, as they may be stored in the
157 161 database.
158 162
159 163 Changing the name of a reason is acceptable and encouraged to deprecate old
160 164 reasons.
161 165 """
162 166
163 167 # Everything went well.
164 168 NONE = 0
165 169
166 170 # An unexpected exception was raised. Check the logs for more details.
167 171 UNKNOWN = 1
168 172
169 173 # The pull request is up to date.
170 174 NO_CHANGE = 2
171 175
172 176 # The pull request has a reference type that is not supported for update.
173 177 WRONG_REF_TYPE = 3
174 178
175 179 # Update failed because the target reference is missing.
176 180 MISSING_TARGET_REF = 4
177 181
178 182 # Update failed because the source reference is missing.
179 183 MISSING_SOURCE_REF = 5
180 184
181 185
182 186 class MergeResponse(object):
183 187
184 188 # uses .format(**metadata) for variables
185 189 MERGE_STATUS_MESSAGES = {
186 190 MergeFailureReason.NONE: lazy_ugettext(
187 191 u'This pull request can be automatically merged.'),
188 192 MergeFailureReason.UNKNOWN: lazy_ugettext(
189 193 u'This pull request cannot be merged because of an unhandled exception. '
190 194 u'{exception}'),
191 195 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
192 196 u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
193 197 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
194 198 u'This pull request could not be merged because push to '
195 199 u'target:`{target}@{merge_commit}` failed.'),
196 200 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
197 201 u'This pull request cannot be merged because the target '
198 202 u'`{target_ref.name}` is not a head.'),
199 203 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
200 204 u'This pull request cannot be merged because the source contains '
201 205 u'more branches than the target.'),
202 206 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
203 207 u'This pull request cannot be merged because the target `{target_ref.name}` '
204 208 u'has multiple heads: `{heads}`.'),
205 209 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
206 210 u'This pull request cannot be merged because the target repository is '
207 211 u'locked by {locked_by}.'),
208 212
209 213 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
210 214 u'This pull request cannot be merged because the target '
211 215 u'reference `{target_ref.name}` is missing.'),
212 216 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
213 217 u'This pull request cannot be merged because the source '
214 218 u'reference `{source_ref.name}` is missing.'),
215 219 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
216 220 u'This pull request cannot be merged because of conflicts related '
217 221 u'to sub repositories.'),
218 222
219 223 # Deprecations
220 224 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
221 225 u'This pull request cannot be merged because the target or the '
222 226 u'source reference is missing.'),
223 227
224 228 }
225 229
226 230 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
227 231 self.possible = possible
228 232 self.executed = executed
229 233 self.merge_ref = merge_ref
230 234 self.failure_reason = failure_reason
231 235 self.metadata = metadata or {}
232 236
233 237 def __repr__(self):
234 238 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
235 239
236 240 def __eq__(self, other):
237 241 same_instance = isinstance(other, self.__class__)
238 242 return same_instance \
239 243 and self.possible == other.possible \
240 244 and self.executed == other.executed \
241 245 and self.failure_reason == other.failure_reason
242 246
243 247 @property
244 248 def label(self):
245 249 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
246 250 not k.startswith('_'))
247 251 return label_dict.get(self.failure_reason)
248 252
249 253 @property
250 254 def merge_status_message(self):
251 255 """
252 256 Return a human friendly error message for the given merge status code.
253 257 """
254 258 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
255 259
256 260 try:
257 261 return msg.format(**self.metadata)
258 262 except Exception:
259 263 log.exception('Failed to format %s message', self)
260 264 return msg
261 265
262 266 def asdict(self):
263 267 data = {}
264 268 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
265 269 'merge_status_message']:
266 270 data[k] = getattr(self, k)
267 271 return data
268 272
269 273
270 274 class TargetRefMissing(ValueError):
271 275 pass
272 276
273 277
274 278 class SourceRefMissing(ValueError):
275 279 pass
276 280
277 281
278 282 class BaseRepository(object):
279 283 """
280 284 Base Repository for final backends
281 285
282 286 .. attribute:: DEFAULT_BRANCH_NAME
283 287
284 288 name of default branch (i.e. "trunk" for svn, "master" for git etc.
285 289
286 290 .. attribute:: commit_ids
287 291
288 292 list of all available commit ids, in ascending order
289 293
290 294 .. attribute:: path
291 295
292 296 absolute path to the repository
293 297
294 298 .. attribute:: bookmarks
295 299
296 300 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
297 301 there are no bookmarks or the backend implementation does not support
298 302 bookmarks.
299 303
300 304 .. attribute:: tags
301 305
302 306 Mapping from name to :term:`Commit ID` of the tag.
303 307
304 308 """
305 309
306 310 DEFAULT_BRANCH_NAME = None
307 311 DEFAULT_CONTACT = u"Unknown"
308 312 DEFAULT_DESCRIPTION = u"unknown"
309 313 EMPTY_COMMIT_ID = '0' * 40
310 314
311 315 path = None
312 316
313 317 _is_empty = None
314 318 _commit_ids = {}
315 319
316 320 def __init__(self, repo_path, config=None, create=False, **kwargs):
317 321 """
318 322 Initializes repository. Raises RepositoryError if repository could
319 323 not be find at the given ``repo_path`` or directory at ``repo_path``
320 324 exists and ``create`` is set to True.
321 325
322 326 :param repo_path: local path of the repository
323 327 :param config: repository configuration
324 328 :param create=False: if set to True, would try to create repository.
325 329 :param src_url=None: if set, should be proper url from which repository
326 330 would be cloned; requires ``create`` parameter to be set to True -
327 331 raises RepositoryError if src_url is set and create evaluates to
328 332 False
329 333 """
330 334 raise NotImplementedError
331 335
332 336 def __repr__(self):
333 337 return '<%s at %s>' % (self.__class__.__name__, self.path)
334 338
335 339 def __len__(self):
336 340 return self.count()
337 341
338 342 def __eq__(self, other):
339 343 same_instance = isinstance(other, self.__class__)
340 344 return same_instance and other.path == self.path
341 345
342 346 def __ne__(self, other):
343 347 return not self.__eq__(other)
344 348
345 349 def get_create_shadow_cache_pr_path(self, db_repo):
346 350 path = db_repo.cached_diffs_dir
347 351 if not os.path.exists(path):
348 352 os.makedirs(path, 0o755)
349 353 return path
350 354
351 355 @classmethod
352 356 def get_default_config(cls, default=None):
353 357 config = Config()
354 358 if default and isinstance(default, list):
355 359 for section, key, val in default:
356 360 config.set(section, key, val)
357 361 return config
358 362
359 363 @LazyProperty
360 364 def _remote(self):
361 365 raise NotImplementedError
362 366
363 367 def _heads(self, branch=None):
364 368 return []
365 369
366 370 @LazyProperty
367 371 def EMPTY_COMMIT(self):
368 372 return EmptyCommit(self.EMPTY_COMMIT_ID)
369 373
370 374 @LazyProperty
371 375 def alias(self):
372 376 for k, v in settings.BACKENDS.items():
373 377 if v.split('.')[-1] == str(self.__class__.__name__):
374 378 return k
375 379
376 380 @LazyProperty
377 381 def name(self):
378 382 return safe_unicode(os.path.basename(self.path))
379 383
380 384 @LazyProperty
381 385 def description(self):
382 386 raise NotImplementedError
383 387
384 388 def refs(self):
385 389 """
386 390 returns a `dict` with branches, bookmarks, tags, and closed_branches
387 391 for this repository
388 392 """
389 393 return dict(
390 394 branches=self.branches,
391 395 branches_closed=self.branches_closed,
392 396 tags=self.tags,
393 397 bookmarks=self.bookmarks
394 398 )
395 399
396 400 @LazyProperty
397 401 def branches(self):
398 402 """
399 403 A `dict` which maps branch names to commit ids.
400 404 """
401 405 raise NotImplementedError
402 406
    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError
409 413
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError
416 420
417 421 @LazyProperty
418 422 def tags(self):
419 423 """
420 424 A `dict` which maps tags names to commit ids.
421 425 """
422 426 raise NotImplementedError
423 427
424 428 @LazyProperty
425 429 def size(self):
426 430 """
427 431 Returns combined size in bytes for all repository files
428 432 """
429 433 tip = self.get_commit()
430 434 return tip.size
431 435
432 436 def size_at_commit(self, commit_id):
433 437 commit = self.get_commit(commit_id)
434 438 return commit.size
435 439
436 440 def _check_for_empty(self):
437 441 no_commits = len(self._commit_ids) == 0
438 442 if no_commits:
439 443 # check on remote to be sure
440 444 return self._remote.is_empty()
441 445 else:
442 446 return False
443 447
    def is_empty(self):
        """
        Returns ``True`` if the repository has no commits.

        Under test runs every call re-checks via the remote; in production
        the answer is cached on the instance (``append_commit_id`` resets it).
        """
        if rhodecode.is_test:
            return self._check_for_empty()

        if self._is_empty is None:
            # cache empty for production, but not tests
            self._is_empty = self._check_for_empty()

        return self._is_empty
453 457
454 458 @staticmethod
455 459 def check_url(url, config):
456 460 """
457 461 Function will check given url and try to verify if it's a valid
458 462 link.
459 463 """
460 464 raise NotImplementedError
461 465
462 466 @staticmethod
463 467 def is_valid_repository(path):
464 468 """
465 469 Check if given `path` contains a valid repository of this backend
466 470 """
467 471 raise NotImplementedError
468 472
469 473 # ==========================================================================
470 474 # COMMITS
471 475 # ==========================================================================
472 476
473 477 @CachedProperty
474 478 def commit_ids(self):
475 479 raise NotImplementedError
476 480
    def append_commit_id(self, commit_id):
        """
        Register `commit_id` in the cached commit-id list and refresh the
        related cached state.

        :param commit_id: commit id to append; the rebuild is skipped if it
            is already known, but caches are invalidated either way.
        """
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])

        # clear cache
        self._invalidate_prop_cache('commit_ids')
        self._is_empty = False
484 488
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param commit_id: Optional. Commit id to look up.
        :param commit_idx: Optional. Numeric index of the commit.
        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tag: Optional. NOTE(review): backend-specific tag
            translation flag - confirm semantics in subclass implementations.
        :param maybe_unreachable: Optional. Presumably allows resolving
            commits not reachable from any ref - TODO confirm in backends.
        :param reference_obj: Optional. NOTE(review): appears to support
            ref-object based lookup - confirm in subclass implementations.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError
496 500
497 501 def __iter__(self):
498 502 for commit_id in self.commit_ids:
499 503 yield self.get_commit(commit_id=commit_id)
500 504
501 505 def get_commits(
502 506 self, start_id=None, end_id=None, start_date=None, end_date=None,
503 507 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
504 508 """
505 509 Returns iterator of `BaseCommit` objects from start to end
506 510 not inclusive. This should behave just like a list, ie. end is not
507 511 inclusive.
508 512
509 513 :param start_id: None or str, must be a valid commit id
510 514 :param end_id: None or str, must be a valid commit id
511 515 :param start_date:
512 516 :param end_date:
513 517 :param branch_name:
514 518 :param show_hidden:
515 519 :param pre_load:
516 520 :param translate_tags:
517 521 """
518 522 raise NotImplementedError
519 523
520 524 def __getitem__(self, key):
521 525 """
522 526 Allows index based access to the commit objects of this repository.
523 527 """
524 528 pre_load = ["author", "branch", "date", "message", "parents"]
525 529 if isinstance(key, slice):
526 530 return self._get_range(key, pre_load)
527 531 return self.get_commit(commit_idx=key, pre_load=pre_load)
528 532
529 533 def _get_range(self, slice_obj, pre_load):
530 534 for commit_id in self.commit_ids.__getitem__(slice_obj):
531 535 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
532 536
533 537 def count(self):
534 538 return len(self.commit_ids)
535 539
536 540 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
537 541 """
538 542 Creates and returns a tag for the given ``commit_id``.
539 543
540 544 :param name: name for new tag
541 545 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
542 546 :param commit_id: commit id for which new tag would be created
543 547 :param message: message of the tag's commit
544 548 :param date: date of tag's commit
545 549
546 550 :raises TagAlreadyExistError: if tag with same name already exists
547 551 """
548 552 raise NotImplementedError
549 553
550 554 def remove_tag(self, name, user, message=None, date=None):
551 555 """
552 556 Removes tag with the given ``name``.
553 557
554 558 :param name: name of the tag to be removed
555 559 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
556 560 :param message: message of the tag's removal commit
557 561 :param date: date of tag's removal commit
558 562
559 563 :raises TagDoesNotExistError: if tag with given name does not exists
560 564 """
561 565 raise NotImplementedError
562 566
563 567 def get_diff(
564 568 self, commit1, commit2, path=None, ignore_whitespace=False,
565 569 context=3, path1=None):
566 570 """
567 571 Returns (git like) *diff*, as plain text. Shows changes introduced by
568 572 `commit2` since `commit1`.
569 573
570 574 :param commit1: Entry point from which diff is shown. Can be
571 575 ``self.EMPTY_COMMIT`` - in this case, patch showing all
572 576 the changes since empty state of the repository until `commit2`
573 577 :param commit2: Until which commit changes should be shown.
574 578 :param path: Can be set to a path of a file to create a diff of that
575 579 file. If `path1` is also set, this value is only associated to
576 580 `commit2`.
577 581 :param ignore_whitespace: If set to ``True``, would not show whitespace
578 582 changes. Defaults to ``False``.
579 583 :param context: How many lines before/after changed lines should be
580 584 shown. Defaults to ``3``.
581 585 :param path1: Can be set to a path to associate with `commit1`. This
582 586 parameter works only for backends which support diff generation for
583 587 different paths. Other backends will raise a `ValueError` if `path1`
584 588 is set and has a different value than `path`.
585 589 :param file_path: filter this diff by given path pattern
586 590 """
587 591 raise NotImplementedError
588 592
589 593 def strip(self, commit_id, branch=None):
590 594 """
591 595 Strip given commit_id from the repository
592 596 """
593 597 raise NotImplementedError
594 598
595 599 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
596 600 """
597 601 Return a latest common ancestor commit if one exists for this repo
598 602 `commit_id1` vs `commit_id2` from `repo2`.
599 603
600 604 :param commit_id1: Commit it from this repository to use as a
601 605 target for the comparison.
602 606 :param commit_id2: Source commit id to use for comparison.
603 607 :param repo2: Source repository to use for comparison.
604 608 """
605 609 raise NotImplementedError
606 610
607 611 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
608 612 """
609 613 Compare this repository's revision `commit_id1` with `commit_id2`.
610 614
611 615 Returns a tuple(commits, ancestor) that would be merged from
612 616 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
613 617 will be returned as ancestor.
614 618
615 619 :param commit_id1: Commit it from this repository to use as a
616 620 target for the comparison.
617 621 :param commit_id2: Source commit id to use for comparison.
618 622 :param repo2: Source repository to use for comparison.
619 623 :param merge: If set to ``True`` will do a merge compare which also
620 624 returns the common ancestor.
621 625 :param pre_load: Optional. List of commit attributes to load.
622 626 """
623 627 raise NotImplementedError
624 628
625 629 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
626 630 user_name='', user_email='', message='', dry_run=False,
627 631 use_rebase=False, close_branch=False):
628 632 """
629 633 Merge the revisions specified in `source_ref` from `source_repo`
630 634 onto the `target_ref` of this repository.
631 635
632 636 `source_ref` and `target_ref` are named tupls with the following
633 637 fields `type`, `name` and `commit_id`.
634 638
635 639 Returns a MergeResponse named tuple with the following fields
636 640 'possible', 'executed', 'source_commit', 'target_commit',
637 641 'merge_commit'.
638 642
639 643 :param repo_id: `repo_id` target repo id.
640 644 :param workspace_id: `workspace_id` unique identifier.
641 645 :param target_ref: `target_ref` points to the commit on top of which
642 646 the `source_ref` should be merged.
643 647 :param source_repo: The repository that contains the commits to be
644 648 merged.
645 649 :param source_ref: `source_ref` points to the topmost commit from
646 650 the `source_repo` which should be merged.
647 651 :param user_name: Merge commit `user_name`.
648 652 :param user_email: Merge commit `user_email`.
649 653 :param message: Merge commit `message`.
650 654 :param dry_run: If `True` the merge will not take place.
651 655 :param use_rebase: If `True` commits from the source will be rebased
652 656 on top of the target instead of being merged.
653 657 :param close_branch: If `True` branch will be close before merging it
654 658 """
655 659 if dry_run:
656 660 message = message or settings.MERGE_DRY_RUN_MESSAGE
657 661 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
658 662 user_name = user_name or settings.MERGE_DRY_RUN_USER
659 663 else:
660 664 if not user_name:
661 665 raise ValueError('user_name cannot be empty')
662 666 if not user_email:
663 667 raise ValueError('user_email cannot be empty')
664 668 if not message:
665 669 raise ValueError('message cannot be empty')
666 670
667 671 try:
668 672 return self._merge_repo(
669 673 repo_id, workspace_id, target_ref, source_repo,
670 674 source_ref, message, user_name, user_email, dry_run=dry_run,
671 675 use_rebase=use_rebase, close_branch=close_branch)
672 676 except RepositoryError as exc:
673 677 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
674 678 return MergeResponse(
675 679 False, False, None, MergeFailureReason.UNKNOWN,
676 680 metadata={'exception': str(exc)})
677 681
678 682 def _merge_repo(self, repo_id, workspace_id, target_ref,
679 683 source_repo, source_ref, merge_message,
680 684 merger_name, merger_email, dry_run=False,
681 685 use_rebase=False, close_branch=False):
682 686 """Internal implementation of merge."""
683 687 raise NotImplementedError
684 688
685 689 def _maybe_prepare_merge_workspace(
686 690 self, repo_id, workspace_id, target_ref, source_ref):
687 691 """
688 692 Create the merge workspace.
689 693
690 694 :param workspace_id: `workspace_id` unique identifier.
691 695 """
692 696 raise NotImplementedError
693 697
694 698 @classmethod
695 699 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
696 700 """
697 701 Legacy version that was used before. We still need it for
698 702 backward compat
699 703 """
700 704 return os.path.join(
701 705 os.path.dirname(repo_path),
702 706 '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))
703 707
704 708 @classmethod
705 709 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
706 710 # The name of the shadow repository must start with '.', so it is
707 711 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
708 712 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
709 713 if os.path.exists(legacy_repository_path):
710 714 return legacy_repository_path
711 715 else:
712 716 return os.path.join(
713 717 os.path.dirname(repo_path),
714 718 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
715 719
    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param repo_id: repository id the shadow repository belongs to.
        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        # unique timestamped '.delete' name so concurrent cleanups and a
        # freshly re-created shadow repo never collide
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            # best effort: log, then retry while swallowing leftover errors
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
743 747
744 748 # ========== #
745 749 # COMMIT API #
746 750 # ========== #
747 751
748 752 @LazyProperty
749 753 def in_memory_commit(self):
750 754 """
751 755 Returns :class:`InMemoryCommit` object for this repository.
752 756 """
753 757 raise NotImplementedError
754 758
755 759 # ======================== #
756 760 # UTILITIES FOR SUBCLASSES #
757 761 # ======================== #
758 762
759 763 def _validate_diff_commits(self, commit1, commit2):
760 764 """
761 765 Validates that the given commits are related to this repository.
762 766
763 767 Intended as a utility for sub classes to have a consistent validation
764 768 of input parameters in methods like :meth:`get_diff`.
765 769 """
766 770 self._validate_commit(commit1)
767 771 self._validate_commit(commit2)
768 772 if (isinstance(commit1, EmptyCommit) and
769 773 isinstance(commit2, EmptyCommit)):
770 774 raise ValueError("Cannot compare two empty commits")
771 775
772 776 def _validate_commit(self, commit):
773 777 if not isinstance(commit, BaseCommit):
774 778 raise TypeError(
775 779 "%s is not of type BaseCommit" % repr(commit))
776 780 if commit.repository != self and not isinstance(commit, EmptyCommit):
777 781 raise ValueError(
778 782 "Commit %s must be a valid commit from this repository %s, "
779 783 "related to this repository instead %s." %
780 784 (commit, self, commit.repository))
781 785
782 786 def _validate_commit_id(self, commit_id):
783 787 if not isinstance(commit_id, compat.string_types):
784 788 raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
785 789
786 790 def _validate_commit_idx(self, commit_idx):
787 791 if not isinstance(commit_idx, (int, long)):
788 792 raise TypeError("commit_idx must be a numeric value")
789 793
790 794 def _validate_branch_name(self, branch_name):
791 795 if branch_name and branch_name not in self.branches_all:
792 796 msg = ("Branch %s not found in %s" % (branch_name, self))
793 797 raise BranchDoesNotExistError(msg)
794 798
795 799 #
796 800 # Supporting deprecated API parts
797 801 # TODO: johbo: consider to move this into a mixin
798 802 #
799 803
800 804 @property
801 805 def EMPTY_CHANGESET(self):
802 806 warnings.warn(
803 807 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
804 808 return self.EMPTY_COMMIT_ID
805 809
806 810 @property
807 811 def revisions(self):
808 812 warnings.warn("Use commits attribute instead", DeprecationWarning)
809 813 return self.commit_ids
810 814
811 815 @revisions.setter
812 816 def revisions(self, value):
813 817 warnings.warn("Use commits attribute instead", DeprecationWarning)
814 818 self.commit_ids = value
815 819
    def get_changeset(self, revision=None, pre_load=None):
        """
        Deprecated: use :meth:`get_commit` instead.

        `revision` may be either a commit id (string) or a numeric commit
        index; it is routed to the matching `get_commit` argument.
        """
        warnings.warn("Use get_commit instead", DeprecationWarning)
        commit_id = None
        commit_idx = None
        if isinstance(revision, compat.string_types):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
826 830
827 831 def get_changesets(
828 832 self, start=None, end=None, start_date=None, end_date=None,
829 833 branch_name=None, pre_load=None):
830 834 warnings.warn("Use get_commits instead", DeprecationWarning)
831 835 start_id = self._revision_to_commit(start)
832 836 end_id = self._revision_to_commit(end)
833 837 return self.get_commits(
834 838 start_id=start_id, end_id=end_id, start_date=start_date,
835 839 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
836 840
837 841 def _revision_to_commit(self, revision):
838 842 """
839 843 Translates a revision to a commit_id
840 844
841 845 Helps to support the old changeset based API which allows to use
842 846 commit ids and commit indices interchangeable.
843 847 """
844 848 if revision is None:
845 849 return revision
846 850
847 851 if isinstance(revision, compat.string_types):
848 852 commit_id = revision
849 853 else:
850 854 commit_id = self.commit_ids[revision]
851 855 return commit_id
852 856
853 857 @property
854 858 def in_memory_changeset(self):
855 859 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
856 860 return self.in_memory_commit
857 861
858 862 def get_path_permissions(self, username):
859 863 """
860 864 Returns a path permission checker or None if not supported
861 865
862 866 :param username: session user name
863 867 :return: an instance of BasePathPermissionChecker or None
864 868 """
865 869 return None
866 870
867 871 def install_hooks(self, force=False):
868 872 return self._remote.install_hooks(force)
869 873
870 874 def get_hooks_info(self):
871 875 return self._remote.get_hooks_info()
872 876
873 877
874 878 class BaseCommit(object):
875 879 """
876 880 Each backend should implement it's commit representation.
877 881
878 882 **Attributes**
879 883
880 884 ``repository``
881 885 repository object within which commit exists
882 886
883 887 ``id``
884 888 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
885 889 just ``tip``.
886 890
887 891 ``raw_id``
888 892 raw commit representation (i.e. full 40 length sha for git
889 893 backend)
890 894
891 895 ``short_id``
892 896 shortened (if apply) version of ``raw_id``; it would be simple
893 897 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
894 898 as ``raw_id`` for subversion
895 899
896 900 ``idx``
897 901 commit index
898 902
899 903 ``files``
900 904 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
901 905
902 906 ``dirs``
903 907 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
904 908
905 909 ``nodes``
906 910 combined list of ``Node`` objects
907 911
908 912 ``author``
909 913 author of the commit, as unicode
910 914
911 915 ``message``
912 916 message of the commit, as unicode
913 917
914 918 ``parents``
915 919 list of parent commits
916 920
917 921 """
918 922 repository = None
919 923 branch = None
920 924
921 925 """
922 926 Depending on the backend this should be set to the branch name of the
923 927 commit. Backends not supporting branches on commits should leave this
924 928 value as ``None``.
925 929 """
926 930
927 931 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
928 932 """
929 933 This template is used to generate a default prefix for repository archives
930 934 if no prefix has been specified.
931 935 """
932 936
933 937 def __str__(self):
934 938 return '<%s at %s:%s>' % (
935 939 self.__class__.__name__, self.idx, self.short_id)
936 940
937 941 def __repr__(self):
938 942 return self.__str__()
939 943
940 944 def __unicode__(self):
941 945 return u'%s:%s' % (self.idx, self.short_id)
942 946
943 947 def __eq__(self, other):
944 948 same_instance = isinstance(other, self.__class__)
945 949 return same_instance and self.raw_id == other.raw_id
946 950
947 951 def __json__(self):
948 952 parents = []
949 953 try:
950 954 for parent in self.parents:
951 955 parents.append({'raw_id': parent.raw_id})
952 956 except NotImplementedError:
953 957 # empty commit doesn't have parents implemented
954 958 pass
955 959
956 960 return {
957 961 'short_id': self.short_id,
958 962 'raw_id': self.raw_id,
959 963 'revision': self.idx,
960 964 'message': self.message,
961 965 'date': self.date,
962 966 'author': self.author,
963 967 'parents': parents,
964 968 'branch': self.branch
965 969 }
966 970
967 971 def __getstate__(self):
968 972 d = self.__dict__.copy()
969 973 d.pop('_remote', None)
970 974 d.pop('repository', None)
971 975 return d
972 976
973 977 def serialize(self):
974 978 return self.__json__()
975 979
976 980 def _get_refs(self):
977 981 return {
978 982 'branches': [self.branch] if self.branch else [],
979 983 'bookmarks': getattr(self, 'bookmarks', []),
980 984 'tags': self.tags
981 985 }
982 986
983 987 @LazyProperty
984 988 def last(self):
985 989 """
986 990 ``True`` if this is last commit in repository, ``False``
987 991 otherwise; trying to access this attribute while there is no
988 992 commits would raise `EmptyRepositoryError`
989 993 """
990 994 if self.repository is None:
991 995 raise CommitError("Cannot check if it's most recent commit")
992 996 return self.raw_id == self.repository.commit_ids[-1]
993 997
994 998 @LazyProperty
995 999 def parents(self):
996 1000 """
997 1001 Returns list of parent commits.
998 1002 """
999 1003 raise NotImplementedError
1000 1004
    @LazyProperty
    def first_parent(self):
        """
        Returns the first parent commit, or an ``EmptyCommit`` when this
        commit has no parents.
        """
        return self.parents[0] if self.parents else EmptyCommit()
1007 1011
1008 1012 @property
1009 1013 def merge(self):
1010 1014 """
1011 1015 Returns boolean if commit is a merge.
1012 1016 """
1013 1017 return len(self.parents) > 1
1014 1018
1015 1019 @LazyProperty
1016 1020 def children(self):
1017 1021 """
1018 1022 Returns list of child commits.
1019 1023 """
1020 1024 raise NotImplementedError
1021 1025
1022 1026 @LazyProperty
1023 1027 def id(self):
1024 1028 """
1025 1029 Returns string identifying this commit.
1026 1030 """
1027 1031 raise NotImplementedError
1028 1032
1029 1033 @LazyProperty
1030 1034 def raw_id(self):
1031 1035 """
1032 1036 Returns raw string identifying this commit.
1033 1037 """
1034 1038 raise NotImplementedError
1035 1039
1036 1040 @LazyProperty
1037 1041 def short_id(self):
1038 1042 """
1039 1043 Returns shortened version of ``raw_id`` attribute, as string,
1040 1044 identifying this commit, useful for presentation to users.
1041 1045 """
1042 1046 raise NotImplementedError
1043 1047
1044 1048 @LazyProperty
1045 1049 def idx(self):
1046 1050 """
1047 1051 Returns integer identifying this commit.
1048 1052 """
1049 1053 raise NotImplementedError
1050 1054
1051 1055 @LazyProperty
1052 1056 def committer(self):
1053 1057 """
1054 1058 Returns committer for this commit
1055 1059 """
1056 1060 raise NotImplementedError
1057 1061
1058 1062 @LazyProperty
1059 1063 def committer_name(self):
1060 1064 """
1061 1065 Returns committer name for this commit
1062 1066 """
1063 1067
1064 1068 return author_name(self.committer)
1065 1069
1066 1070 @LazyProperty
1067 1071 def committer_email(self):
1068 1072 """
1069 1073 Returns committer email address for this commit
1070 1074 """
1071 1075
1072 1076 return author_email(self.committer)
1073 1077
1074 1078 @LazyProperty
1075 1079 def author(self):
1076 1080 """
1077 1081 Returns author for this commit
1078 1082 """
1079 1083
1080 1084 raise NotImplementedError
1081 1085
1082 1086 @LazyProperty
1083 1087 def author_name(self):
1084 1088 """
1085 1089 Returns author name for this commit
1086 1090 """
1087 1091
1088 1092 return author_name(self.author)
1089 1093
1090 1094 @LazyProperty
1091 1095 def author_email(self):
1092 1096 """
1093 1097 Returns author email address for this commit
1094 1098 """
1095 1099
1096 1100 return author_email(self.author)
1097 1101
1098 1102 def get_file_mode(self, path):
1099 1103 """
1100 1104 Returns stat mode of the file at `path`.
1101 1105 """
1102 1106 raise NotImplementedError
1103 1107
1104 1108 def is_link(self, path):
1105 1109 """
1106 1110 Returns ``True`` if given `path` is a symlink
1107 1111 """
1108 1112 raise NotImplementedError
1109 1113
    def is_node_binary(self, path):
        """
        Returns ``True`` if given `path` is a binary file
        """
        raise NotImplementedError
1115 1119
1116 1120 def get_file_content(self, path):
1117 1121 """
1118 1122 Returns content of the file at the given `path`.
1119 1123 """
1120 1124 raise NotImplementedError
1121 1125
1122 1126 def get_file_content_streamed(self, path):
1123 1127 """
1124 1128 returns a streaming response from vcsserver with file content
1125 1129 """
1126 1130 raise NotImplementedError
1127 1131
1128 1132 def get_file_size(self, path):
1129 1133 """
1130 1134 Returns size of the file at the given `path`.
1131 1135 """
1132 1136 raise NotImplementedError
1133 1137
1134 1138 def get_path_commit(self, path, pre_load=None):
1135 1139 """
1136 1140 Returns last commit of the file at the given `path`.
1137 1141
1138 1142 :param pre_load: Optional. List of commit attributes to load.
1139 1143 """
1140 1144 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1141 1145 if not commits:
1142 1146 raise RepositoryError(
1143 1147 'Failed to fetch history for path {}. '
1144 1148 'Please check if such path exists in your repository'.format(
1145 1149 path))
1146 1150 return commits[0]
1147 1151
1148 1152 def get_path_history(self, path, limit=None, pre_load=None):
1149 1153 """
1150 1154 Returns history of file as reversed list of :class:`BaseCommit`
1151 1155 objects for which file at given `path` has been modified.
1152 1156
1153 1157 :param limit: Optional. Allows to limit the size of the returned
1154 1158 history. This is intended as a hint to the underlying backend, so
1155 1159 that it can apply optimizations depending on the limit.
1156 1160 :param pre_load: Optional. List of commit attributes to load.
1157 1161 """
1158 1162 raise NotImplementedError
1159 1163
1160 1164 def get_file_annotate(self, path, pre_load=None):
1161 1165 """
1162 1166 Returns a generator of four element tuples with
1163 1167 lineno, sha, commit lazy loader and line
1164 1168
1165 1169 :param pre_load: Optional. List of commit attributes to load.
1166 1170 """
1167 1171 raise NotImplementedError
1168 1172
1169 1173 def get_nodes(self, path):
1170 1174 """
1171 1175 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1172 1176 state of commit at the given ``path``.
1173 1177
1174 1178 :raises ``CommitError``: if node at the given ``path`` is not
1175 1179 instance of ``DirNode``
1176 1180 """
1177 1181 raise NotImplementedError
1178 1182
1179 1183 def get_node(self, path):
1180 1184 """
1181 1185 Returns ``Node`` object from the given ``path``.
1182 1186
1183 1187 :raises ``NodeDoesNotExistError``: if there is no node at the given
1184 1188 ``path``
1185 1189 """
1186 1190 raise NotImplementedError
1187 1191
1188 1192 def get_largefile_node(self, path):
1189 1193 """
1190 1194 Returns the path to largefile from Mercurial/Git-lfs storage.
1191 1195 or None if it's not a largefile node
1192 1196 """
1193 1197 return None
1194 1198
1195 1199 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1196 1200 archive_dir_name=None, write_metadata=False, mtime=None,
1197 1201 archive_at_path='/'):
1198 1202 """
1199 1203 Creates an archive containing the contents of the repository.
1200 1204
1201 1205 :param archive_dest_path: path to the file which to create the archive.
1202 1206 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1203 1207 :param archive_dir_name: name of root directory in archive.
1204 1208 Default is repository name and commit's short_id joined with dash:
1205 1209 ``"{repo_name}-{short_id}"``.
1206 1210 :param write_metadata: write a metadata file into archive.
1207 1211 :param mtime: custom modification time for archive creation, defaults
1208 1212 to time.time() if not given.
1209 1213 :param archive_at_path: pack files at this path (default '/')
1210 1214
1211 1215 :raise VCSError: If prefix has a problem.
1212 1216 """
1213 1217 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1214 1218 if kind not in allowed_kinds:
1215 1219 raise ImproperArchiveTypeError(
1216 1220 'Archive kind (%s) not supported use one of %s' %
1217 1221 (kind, allowed_kinds))
1218 1222
1219 1223 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1220 1224 mtime = mtime is not None or time.mktime(self.date.timetuple())
1221 1225 commit_id = self.raw_id
1222 1226
1223 1227 return self.repository._remote.archive_repo(
1224 1228 archive_dest_path, kind, mtime, archive_at_path,
1225 1229 archive_dir_name, commit_id)
1226 1230
1227 1231 def _validate_archive_prefix(self, archive_dir_name):
1228 1232 if archive_dir_name is None:
1229 1233 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1230 1234 repo_name=safe_str(self.repository.name),
1231 1235 short_id=self.short_id)
1232 1236 elif not isinstance(archive_dir_name, str):
1233 1237 raise ValueError("prefix not a bytes object: %s" % repr(archive_dir_name))
1234 1238 elif archive_dir_name.startswith('/'):
1235 1239 raise VCSError("Prefix cannot start with leading slash")
1236 1240 elif archive_dir_name.strip() == '':
1237 1241 raise VCSError("Prefix cannot be empty")
1238 1242 return archive_dir_name
1239 1243
1240 1244 @LazyProperty
1241 1245 def root(self):
1242 1246 """
1243 1247 Returns ``RootNode`` object for this commit.
1244 1248 """
1245 1249 return self.get_node('')
1246 1250
1247 1251 def next(self, branch=None):
1248 1252 """
1249 1253 Returns next commit from current, if branch is gives it will return
1250 1254 next commit belonging to this branch
1251 1255
1252 1256 :param branch: show commits within the given named branch
1253 1257 """
1254 1258 indexes = xrange(self.idx + 1, self.repository.count())
1255 1259 return self._find_next(indexes, branch)
1256 1260
1257 1261 def prev(self, branch=None):
1258 1262 """
1259 1263 Returns previous commit from current, if branch is gives it will
1260 1264 return previous commit belonging to this branch
1261 1265
1262 1266 :param branch: show commit within the given named branch
1263 1267 """
1264 1268 indexes = xrange(self.idx - 1, -1, -1)
1265 1269 return self._find_next(indexes, branch)
1266 1270
    def _find_next(self, indexes, branch=None):
        """
        Walk the given commit `indexes` and return the first commit found,
        optionally restricted to commits on `branch`.

        :raises VCSError: if `branch` is given but this commit is not on it.
        :raises CommitDoesNotExistError: when no matching commit is found.
        """
        if branch and self.branch != branch:
            raise VCSError('Branch option used on commit not belonging '
                          'to that branch')

        for next_idx in indexes:
            commit = self.repository.get_commit(commit_idx=next_idx)
            if branch and branch != commit.branch:
                continue
            return commit
        raise CommitDoesNotExistError
1278 1282
1279 1283 def diff(self, ignore_whitespace=True, context=3):
1280 1284 """
1281 1285 Returns a `Diff` object representing the change made by this commit.
1282 1286 """
1283 1287 parent = self.first_parent
1284 1288 diff = self.repository.get_diff(
1285 1289 parent, self,
1286 1290 ignore_whitespace=ignore_whitespace,
1287 1291 context=context)
1288 1292 return diff
1289 1293
1290 1294 @LazyProperty
1291 1295 def added(self):
1292 1296 """
1293 1297 Returns list of added ``FileNode`` objects.
1294 1298 """
1295 1299 raise NotImplementedError
1296 1300
1297 1301 @LazyProperty
1298 1302 def changed(self):
1299 1303 """
1300 1304 Returns list of modified ``FileNode`` objects.
1301 1305 """
1302 1306 raise NotImplementedError
1303 1307
1304 1308 @LazyProperty
1305 1309 def removed(self):
1306 1310 """
1307 1311 Returns list of removed ``FileNode`` objects.
1308 1312 """
1309 1313 raise NotImplementedError
1310 1314
1311 1315 @LazyProperty
1312 1316 def size(self):
1313 1317 """
1314 1318 Returns total number of bytes from contents of all filenodes.
1315 1319 """
1316 1320 return sum((node.size for node in self.get_filenodes_generator()))
1317 1321
1318 1322 def walk(self, topurl=''):
1319 1323 """
1320 1324 Similar to os.walk method. Insted of filesystem it walks through
1321 1325 commit starting at given ``topurl``. Returns generator of tuples
1322 1326 (topnode, dirnodes, filenodes).
1323 1327 """
1324 1328 topnode = self.get_node(topurl)
1325 1329 if not topnode.is_dir():
1326 1330 return
1327 1331 yield (topnode, topnode.dirs, topnode.files)
1328 1332 for dirnode in topnode.dirs:
1329 1333 for tup in self.walk(dirnode.path):
1330 1334 yield tup
1331 1335
1332 1336 def get_filenodes_generator(self):
1333 1337 """
1334 1338 Returns generator that yields *all* file nodes.
1335 1339 """
1336 1340 for topnode, dirs, files in self.walk():
1337 1341 for node in files:
1338 1342 yield node
1339 1343
1340 1344 #
1341 1345 # Utilities for sub classes to support consistent behavior
1342 1346 #
1343 1347
1344 1348 def no_node_at_path(self, path):
1345 1349 return NodeDoesNotExistError(
1346 1350 u"There is no file nor directory at the given path: "
1347 1351 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1348 1352
1349 1353 def _fix_path(self, path):
1350 1354 """
1351 1355 Paths are stored without trailing slash so we need to get rid off it if
1352 1356 needed.
1353 1357 """
1354 1358 return path.rstrip('/')
1355 1359
1356 1360 #
1357 1361 # Deprecated API based on changesets
1358 1362 #
1359 1363
1360 1364 @property
1361 1365 def revision(self):
1362 1366 warnings.warn("Use idx instead", DeprecationWarning)
1363 1367 return self.idx
1364 1368
1365 1369 @revision.setter
1366 1370 def revision(self, value):
1367 1371 warnings.warn("Use idx instead", DeprecationWarning)
1368 1372 self.idx = value
1369 1373
1370 1374 def get_file_changeset(self, path):
1371 1375 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1372 1376 return self.get_path_commit(path)
1373 1377
1374 1378
1375 1379 class BaseChangesetClass(type):
1376 1380
1377 1381 def __instancecheck__(self, instance):
1378 1382 return isinstance(instance, BaseCommit)
1379 1383
1380 1384
1381 1385 class BaseChangeset(BaseCommit):
1382 1386
1383 1387 __metaclass__ = BaseChangesetClass
1384 1388
1385 1389 def __new__(cls, *args, **kwargs):
1386 1390 warnings.warn(
1387 1391 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1388 1392 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1389 1393
1390 1394
1391 1395 class BaseInMemoryCommit(object):
1392 1396 """
1393 1397 Represents differences between repository's state (most recent head) and
1394 1398 changes made *in place*.
1395 1399
1396 1400 **Attributes**
1397 1401
1398 1402 ``repository``
1399 1403 repository object for this in-memory-commit
1400 1404
1401 1405 ``added``
1402 1406 list of ``FileNode`` objects marked as *added*
1403 1407
1404 1408 ``changed``
1405 1409 list of ``FileNode`` objects marked as *changed*
1406 1410
1407 1411 ``removed``
1408 1412 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1409 1413 *removed*
1410 1414
1411 1415 ``parents``
1412 1416 list of :class:`BaseCommit` instances representing parents of
1413 1417 in-memory commit. Should always be 2-element sequence.
1414 1418
1415 1419 """
1416 1420
1417 1421 def __init__(self, repository):
1418 1422 self.repository = repository
1419 1423 self.added = []
1420 1424 self.changed = []
1421 1425 self.removed = []
1422 1426 self.parents = []
1423 1427
1424 1428 def add(self, *filenodes):
1425 1429 """
1426 1430 Marks given ``FileNode`` objects as *to be committed*.
1427 1431
1428 1432 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1429 1433 latest commit
1430 1434 :raises ``NodeAlreadyAddedError``: if node with same path is already
1431 1435 marked as *added*
1432 1436 """
1433 1437 # Check if not already marked as *added* first
1434 1438 for node in filenodes:
1435 1439 if node.path in (n.path for n in self.added):
1436 1440 raise NodeAlreadyAddedError(
1437 1441 "Such FileNode %s is already marked for addition"
1438 1442 % node.path)
1439 1443 for node in filenodes:
1440 1444 self.added.append(node)
1441 1445
1442 1446 def change(self, *filenodes):
1443 1447 """
1444 1448 Marks given ``FileNode`` objects to be *changed* in next commit.
1445 1449
1446 1450 :raises ``EmptyRepositoryError``: if there are no commits yet
1447 1451 :raises ``NodeAlreadyExistsError``: if node with same path is already
1448 1452 marked to be *changed*
1449 1453 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1450 1454 marked to be *removed*
1451 1455 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1452 1456 commit
1453 1457 :raises ``NodeNotChangedError``: if node hasn't really be changed
1454 1458 """
1455 1459 for node in filenodes:
1456 1460 if node.path in (n.path for n in self.removed):
1457 1461 raise NodeAlreadyRemovedError(
1458 1462 "Node at %s is already marked as removed" % node.path)
1459 1463 try:
1460 1464 self.repository.get_commit()
1461 1465 except EmptyRepositoryError:
1462 1466 raise EmptyRepositoryError(
1463 1467 "Nothing to change - try to *add* new nodes rather than "
1464 1468 "changing them")
1465 1469 for node in filenodes:
1466 1470 if node.path in (n.path for n in self.changed):
1467 1471 raise NodeAlreadyChangedError(
1468 1472 "Node at '%s' is already marked as changed" % node.path)
1469 1473 self.changed.append(node)
1470 1474
1471 1475 def remove(self, *filenodes):
1472 1476 """
1473 1477 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1474 1478 *removed* in next commit.
1475 1479
1476 1480 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1477 1481 be *removed*
1478 1482 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1479 1483 be *changed*
1480 1484 """
1481 1485 for node in filenodes:
1482 1486 if node.path in (n.path for n in self.removed):
1483 1487 raise NodeAlreadyRemovedError(
1484 1488 "Node is already marked to for removal at %s" % node.path)
1485 1489 if node.path in (n.path for n in self.changed):
1486 1490 raise NodeAlreadyChangedError(
1487 1491 "Node is already marked to be changed at %s" % node.path)
1488 1492 # We only mark node as *removed* - real removal is done by
1489 1493 # commit method
1490 1494 self.removed.append(node)
1491 1495
1492 1496 def reset(self):
1493 1497 """
1494 1498 Resets this instance to initial state (cleans ``added``, ``changed``
1495 1499 and ``removed`` lists).
1496 1500 """
1497 1501 self.added = []
1498 1502 self.changed = []
1499 1503 self.removed = []
1500 1504 self.parents = []
1501 1505
1502 1506 def get_ipaths(self):
1503 1507 """
1504 1508 Returns generator of paths from nodes marked as added, changed or
1505 1509 removed.
1506 1510 """
1507 1511 for node in itertools.chain(self.added, self.changed, self.removed):
1508 1512 yield node.path
1509 1513
1510 1514 def get_paths(self):
1511 1515 """
1512 1516 Returns list of paths from nodes marked as added, changed or removed.
1513 1517 """
1514 1518 return list(self.get_ipaths())
1515 1519
1516 1520 def check_integrity(self, parents=None):
1517 1521 """
1518 1522 Checks in-memory commit's integrity. Also, sets parents if not
1519 1523 already set.
1520 1524
1521 1525 :raises CommitError: if any error occurs (i.e.
1522 1526 ``NodeDoesNotExistError``).
1523 1527 """
1524 1528 if not self.parents:
1525 1529 parents = parents or []
1526 1530 if len(parents) == 0:
1527 1531 try:
1528 1532 parents = [self.repository.get_commit(), None]
1529 1533 except EmptyRepositoryError:
1530 1534 parents = [None, None]
1531 1535 elif len(parents) == 1:
1532 1536 parents += [None]
1533 1537 self.parents = parents
1534 1538
1535 1539 # Local parents, only if not None
1536 1540 parents = [p for p in self.parents if p]
1537 1541
1538 1542 # Check nodes marked as added
1539 1543 for p in parents:
1540 1544 for node in self.added:
1541 1545 try:
1542 1546 p.get_node(node.path)
1543 1547 except NodeDoesNotExistError:
1544 1548 pass
1545 1549 else:
1546 1550 raise NodeAlreadyExistsError(
1547 1551 "Node `%s` already exists at %s" % (node.path, p))
1548 1552
1549 1553 # Check nodes marked as changed
1550 1554 missing = set(self.changed)
1551 1555 not_changed = set(self.changed)
1552 1556 if self.changed and not parents:
1553 1557 raise NodeDoesNotExistError(str(self.changed[0].path))
1554 1558 for p in parents:
1555 1559 for node in self.changed:
1556 1560 try:
1557 1561 old = p.get_node(node.path)
1558 1562 missing.remove(node)
1559 1563 # if content actually changed, remove node from not_changed
1560 1564 if old.content != node.content:
1561 1565 not_changed.remove(node)
1562 1566 except NodeDoesNotExistError:
1563 1567 pass
1564 1568 if self.changed and missing:
1565 1569 raise NodeDoesNotExistError(
1566 1570 "Node `%s` marked as modified but missing in parents: %s"
1567 1571 % (node.path, parents))
1568 1572
1569 1573 if self.changed and not_changed:
1570 1574 raise NodeNotChangedError(
1571 1575 "Node `%s` wasn't actually changed (parents: %s)"
1572 1576 % (not_changed.pop().path, parents))
1573 1577
1574 1578 # Check nodes marked as removed
1575 1579 if self.removed and not parents:
1576 1580 raise NodeDoesNotExistError(
1577 1581 "Cannot remove node at %s as there "
1578 1582 "were no parents specified" % self.removed[0].path)
1579 1583 really_removed = set()
1580 1584 for p in parents:
1581 1585 for node in self.removed:
1582 1586 try:
1583 1587 p.get_node(node.path)
1584 1588 really_removed.add(node)
1585 1589 except CommitError:
1586 1590 pass
1587 1591 not_removed = set(self.removed) - really_removed
1588 1592 if not_removed:
1589 1593 # TODO: johbo: This code branch does not seem to be covered
1590 1594 raise NodeDoesNotExistError(
1591 1595 "Cannot remove node at %s from "
1592 1596 "following parents: %s" % (not_removed, parents))
1593 1597
1594 1598 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1595 1599 """
1596 1600 Performs in-memory commit (doesn't check workdir in any way) and
1597 1601 returns newly created :class:`BaseCommit`. Updates repository's
1598 1602 attribute `commits`.
1599 1603
1600 1604 .. note::
1601 1605
1602 1606 While overriding this method each backend's should call
1603 1607 ``self.check_integrity(parents)`` in the first place.
1604 1608
1605 1609 :param message: message of the commit
1606 1610 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1607 1611 :param parents: single parent or sequence of parents from which commit
1608 1612 would be derived
1609 1613 :param date: ``datetime.datetime`` instance. Defaults to
1610 1614 ``datetime.datetime.now()``.
1611 1615 :param branch: branch name, as string. If none given, default backend's
1612 1616 branch would be used.
1613 1617
1614 1618 :raises ``CommitError``: if any error occurs while committing
1615 1619 """
1616 1620 raise NotImplementedError
1617 1621
1618 1622
1619 1623 class BaseInMemoryChangesetClass(type):
1620 1624
1621 1625 def __instancecheck__(self, instance):
1622 1626 return isinstance(instance, BaseInMemoryCommit)
1623 1627
1624 1628
1625 1629 class BaseInMemoryChangeset(BaseInMemoryCommit):
1626 1630
1627 1631 __metaclass__ = BaseInMemoryChangesetClass
1628 1632
1629 1633 def __new__(cls, *args, **kwargs):
1630 1634 warnings.warn(
1631 1635 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1632 1636 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1633 1637
1634 1638
1635 1639 class EmptyCommit(BaseCommit):
1636 1640 """
1637 1641 An dummy empty commit. It's possible to pass hash when creating
1638 1642 an EmptyCommit
1639 1643 """
1640 1644
1641 1645 def __init__(
1642 1646 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1643 1647 message='', author='', date=None):
1644 1648 self._empty_commit_id = commit_id
1645 1649 # TODO: johbo: Solve idx parameter, default value does not make
1646 1650 # too much sense
1647 1651 self.idx = idx
1648 1652 self.message = message
1649 1653 self.author = author
1650 1654 self.date = date or datetime.datetime.fromtimestamp(0)
1651 1655 self.repository = repo
1652 1656 self.alias = alias
1653 1657
1654 1658 @LazyProperty
1655 1659 def raw_id(self):
1656 1660 """
1657 1661 Returns raw string identifying this commit, useful for web
1658 1662 representation.
1659 1663 """
1660 1664
1661 1665 return self._empty_commit_id
1662 1666
1663 1667 @LazyProperty
1664 1668 def branch(self):
1665 1669 if self.alias:
1666 1670 from rhodecode.lib.vcs.backends import get_backend
1667 1671 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1668 1672
1669 1673 @LazyProperty
1670 1674 def short_id(self):
1671 1675 return self.raw_id[:12]
1672 1676
1673 1677 @LazyProperty
1674 1678 def id(self):
1675 1679 return self.raw_id
1676 1680
1677 1681 def get_path_commit(self, path):
1678 1682 return self
1679 1683
1680 1684 def get_file_content(self, path):
1681 1685 return u''
1682 1686
1683 1687 def get_file_content_streamed(self, path):
1684 1688 yield self.get_file_content()
1685 1689
1686 1690 def get_file_size(self, path):
1687 1691 return 0
1688 1692
1689 1693
1690 1694 class EmptyChangesetClass(type):
1691 1695
1692 1696 def __instancecheck__(self, instance):
1693 1697 return isinstance(instance, EmptyCommit)
1694 1698
1695 1699
1696 1700 class EmptyChangeset(EmptyCommit):
1697 1701
1698 1702 __metaclass__ = EmptyChangesetClass
1699 1703
1700 1704 def __new__(cls, *args, **kwargs):
1701 1705 warnings.warn(
1702 1706 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1703 1707 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1704 1708
1705 1709 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1706 1710 alias=None, revision=-1, message='', author='', date=None):
1707 1711 if requested_revision is not None:
1708 1712 warnings.warn(
1709 1713 "Parameter requested_revision not supported anymore",
1710 1714 DeprecationWarning)
1711 1715 super(EmptyChangeset, self).__init__(
1712 1716 commit_id=cs, repo=repo, alias=alias, idx=revision,
1713 1717 message=message, author=author, date=date)
1714 1718
1715 1719 @property
1716 1720 def revision(self):
1717 1721 warnings.warn("Use idx instead", DeprecationWarning)
1718 1722 return self.idx
1719 1723
1720 1724 @revision.setter
1721 1725 def revision(self, value):
1722 1726 warnings.warn("Use idx instead", DeprecationWarning)
1723 1727 self.idx = value
1724 1728
1725 1729
1726 1730 class EmptyRepository(BaseRepository):
1727 1731 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1728 1732 pass
1729 1733
1730 1734 def get_diff(self, *args, **kwargs):
1731 1735 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1732 1736 return GitDiff('')
1733 1737
1734 1738
1735 1739 class CollectionGenerator(object):
1736 1740
1737 1741 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1738 1742 self.repo = repo
1739 1743 self.commit_ids = commit_ids
1740 1744 # TODO: (oliver) this isn't currently hooked up
1741 1745 self.collection_size = None
1742 1746 self.pre_load = pre_load
1743 1747 self.translate_tag = translate_tag
1744 1748
1745 1749 def __len__(self):
1746 1750 if self.collection_size is not None:
1747 1751 return self.collection_size
1748 1752 return self.commit_ids.__len__()
1749 1753
1750 1754 def __iter__(self):
1751 1755 for commit_id in self.commit_ids:
1752 1756 # TODO: johbo: Mercurial passes in commit indices or commit ids
1753 1757 yield self._commit_factory(commit_id)
1754 1758
1755 1759 def _commit_factory(self, commit_id):
1756 1760 """
1757 1761 Allows backends to override the way commits are generated.
1758 1762 """
1759 1763 return self.repo.get_commit(
1760 1764 commit_id=commit_id, pre_load=self.pre_load,
1761 1765 translate_tag=self.translate_tag)
1762 1766
1763 1767 def __getslice__(self, i, j):
1764 1768 """
1765 1769 Returns an iterator of sliced repository
1766 1770 """
1767 1771 commit_ids = self.commit_ids[i:j]
1768 1772 return self.__class__(
1769 1773 self.repo, commit_ids, pre_load=self.pre_load,
1770 1774 translate_tag=self.translate_tag)
1771 1775
1772 1776 def __repr__(self):
1773 1777 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1774 1778
1775 1779
1776 1780 class Config(object):
1777 1781 """
1778 1782 Represents the configuration for a repository.
1779 1783
1780 1784 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1781 1785 standard library. It implements only the needed subset.
1782 1786 """
1783 1787
1784 1788 def __init__(self):
1785 1789 self._values = {}
1786 1790
1787 1791 def copy(self):
1788 1792 clone = Config()
1789 1793 for section, values in self._values.items():
1790 1794 clone._values[section] = values.copy()
1791 1795 return clone
1792 1796
1793 1797 def __repr__(self):
1794 1798 return '<Config(%s sections) at %s>' % (
1795 1799 len(self._values), hex(id(self)))
1796 1800
1797 1801 def items(self, section):
1798 1802 return self._values.get(section, {}).iteritems()
1799 1803
1800 1804 def get(self, section, option):
1801 1805 return self._values.get(section, {}).get(option)
1802 1806
1803 1807 def set(self, section, option, value):
1804 1808 section_values = self._values.setdefault(section, {})
1805 1809 section_values[option] = value
1806 1810
1807 1811 def clear_section(self, section):
1808 1812 self._values[section] = {}
1809 1813
1810 1814 def serialize(self):
1811 1815 """
1812 1816 Creates a list of three tuples (section, key, value) representing
1813 1817 this config object.
1814 1818 """
1815 1819 items = []
1816 1820 for section in self._values:
1817 1821 for option, value in self._values[section].items():
1818 1822 items.append(
1819 1823 (safe_str(section), safe_str(option), safe_str(value)))
1820 1824 return items
1821 1825
1822 1826
1823 1827 class Diff(object):
1824 1828 """
1825 1829 Represents a diff result from a repository backend.
1826 1830
1827 1831 Subclasses have to provide a backend specific value for
1828 1832 :attr:`_header_re` and :attr:`_meta_re`.
1829 1833 """
1830 1834 _meta_re = None
1831 1835 _header_re = None
1832 1836
1833 1837 def __init__(self, raw_diff):
1834 1838 self.raw = raw_diff
1835 1839
1836 1840 def chunks(self):
1837 1841 """
1838 1842 split the diff in chunks of separate --git a/file b/file chunks
1839 1843 to make diffs consistent we must prepend with \n, and make sure
1840 1844 we can detect last chunk as this was also has special rule
1841 1845 """
1842 1846
1843 1847 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1844 1848 header = diff_parts[0]
1845 1849
1846 1850 if self._meta_re:
1847 1851 match = self._meta_re.match(header)
1848 1852
1849 1853 chunks = diff_parts[1:]
1850 1854 total_chunks = len(chunks)
1851 1855
1852 1856 return (
1853 1857 DiffChunk(chunk, self, cur_chunk == total_chunks)
1854 1858 for cur_chunk, chunk in enumerate(chunks, start=1))
1855 1859
1856 1860
1857 1861 class DiffChunk(object):
1858 1862
1859 1863 def __init__(self, chunk, diff, last_chunk):
1860 1864 self._diff = diff
1861 1865
1862 1866 # since we split by \ndiff --git that part is lost from original diff
1863 1867 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1864 1868 if not last_chunk:
1865 1869 chunk += '\n'
1866 1870
1867 1871 match = self._diff._header_re.match(chunk)
1868 1872 self.header = match.groupdict()
1869 1873 self.diff = chunk[match.end():]
1870 1874 self.raw = chunk
1871 1875
1872 1876
1873 1877 class BasePathPermissionChecker(object):
1874 1878
1875 1879 @staticmethod
1876 1880 def create_from_patterns(includes, excludes):
1877 1881 if includes and '*' in includes and not excludes:
1878 1882 return AllPathPermissionChecker()
1879 1883 elif excludes and '*' in excludes:
1880 1884 return NonePathPermissionChecker()
1881 1885 else:
1882 1886 return PatternPathPermissionChecker(includes, excludes)
1883 1887
1884 1888 @property
1885 1889 def has_full_access(self):
1886 1890 raise NotImplemented()
1887 1891
1888 1892 def has_access(self, path):
1889 1893 raise NotImplemented()
1890 1894
1891 1895
1892 1896 class AllPathPermissionChecker(BasePathPermissionChecker):
1893 1897
1894 1898 @property
1895 1899 def has_full_access(self):
1896 1900 return True
1897 1901
1898 1902 def has_access(self, path):
1899 1903 return True
1900 1904
1901 1905
1902 1906 class NonePathPermissionChecker(BasePathPermissionChecker):
1903 1907
1904 1908 @property
1905 1909 def has_full_access(self):
1906 1910 return False
1907 1911
1908 1912 def has_access(self, path):
1909 1913 return False
1910 1914
1911 1915
1912 1916 class PatternPathPermissionChecker(BasePathPermissionChecker):
1913 1917
1914 1918 def __init__(self, includes, excludes):
1915 1919 self.includes = includes
1916 1920 self.excludes = excludes
1917 1921 self.includes_re = [] if not includes else [
1918 1922 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1919 1923 self.excludes_re = [] if not excludes else [
1920 1924 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1921 1925
1922 1926 @property
1923 1927 def has_full_access(self):
1924 1928 return '*' in self.includes and not self.excludes
1925 1929
1926 1930 def has_access(self, path):
1927 1931 for regex in self.excludes_re:
1928 1932 if regex.match(path):
1929 1933 return False
1930 1934 for regex in self.includes_re:
1931 1935 if regex.match(path):
1932 1936 return True
1933 1937 return False
@@ -1,494 +1,496 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT commit module
23 23 """
24 24
25 25 import re
26 26 import stat
27 27 from itertools import chain
28 28 from StringIO import StringIO
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 33 from rhodecode.lib.utils import safe_unicode, safe_str
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib.vcs.conf import settings
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 38 from rhodecode.lib.vcs.nodes import (
39 39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 41 RemovedFileNodesGenerator, LargeFileNode)
42 42 from rhodecode.lib.vcs.compat import configparser
43 43
44 44
45 45 class GitCommit(base.BaseCommit):
46 46 """
47 47 Represents state of the repository at single commit id.
48 48 """
49 49
50 50 _filter_pre_load = [
51 51 # done through a more complex tree walk on parents
52 52 "affected_files",
53 53 # done through subprocess not remote call
54 54 "children",
55 55 # done through a more complex tree walk on parents
56 56 "status",
57 57 # mercurial specific property not supported here
58 58 "_file_paths",
59 59 # mercurial specific property not supported here
60 60 'obsolete',
61 61 # mercurial specific property not supported here
62 62 'phase',
63 63 # mercurial specific property not supported here
64 64 'hidden'
65 65 ]
66 66
67 67 def __init__(self, repository, raw_id, idx, pre_load=None):
68 68 self.repository = repository
69 69 self._remote = repository._remote
70 70 # TODO: johbo: Tweak of raw_id should not be necessary
71 71 self.raw_id = safe_str(raw_id)
72 72 self.idx = idx
73 73
74 74 self._set_bulk_properties(pre_load)
75 75
76 76 # caches
77 77 self._stat_modes = {} # stat info for paths
78 78 self._paths = {} # path processed with parse_tree
79 79 self.nodes = {}
80 80 self._submodules = None
81 81
82 82 def _set_bulk_properties(self, pre_load):
83 83
84 84 if not pre_load:
85 85 return
86 86 pre_load = [entry for entry in pre_load
87 87 if entry not in self._filter_pre_load]
88 88 if not pre_load:
89 89 return
90 90
91 91 result = self._remote.bulk_request(self.raw_id, pre_load)
92 92 for attr, value in result.items():
93 93 if attr in ["author", "message"]:
94 94 if value:
95 95 value = safe_unicode(value)
96 96 elif attr == "date":
97 97 value = utcdate_fromtimestamp(*value)
98 98 elif attr == "parents":
99 99 value = self._make_commits(value)
100 100 elif attr == "branch":
101 value = value[0] if value else None
101 value = self._set_branch(value)
102 102 self.__dict__[attr] = value
103 103
104 104 @LazyProperty
105 105 def _commit(self):
106 106 return self._remote[self.raw_id]
107 107
108 108 @LazyProperty
109 109 def _tree_id(self):
110 110 return self._remote[self._commit['tree']]['id']
111 111
112 112 @LazyProperty
113 113 def id(self):
114 114 return self.raw_id
115 115
116 116 @LazyProperty
117 117 def short_id(self):
118 118 return self.raw_id[:12]
119 119
120 120 @LazyProperty
121 121 def message(self):
122 122 return safe_unicode(self._remote.message(self.id))
123 123
124 124 @LazyProperty
125 125 def committer(self):
126 126 return safe_unicode(self._remote.author(self.id))
127 127
128 128 @LazyProperty
129 129 def author(self):
130 130 return safe_unicode(self._remote.author(self.id))
131 131
132 132 @LazyProperty
133 133 def date(self):
134 134 unix_ts, tz = self._remote.date(self.raw_id)
135 135 return utcdate_fromtimestamp(unix_ts, tz)
136 136
137 137 @LazyProperty
138 138 def status(self):
139 139 """
140 140 Returns modified, added, removed, deleted files for current commit
141 141 """
142 142 return self.changed, self.added, self.removed
143 143
144 144 @LazyProperty
145 145 def tags(self):
146 146 tags = [safe_unicode(name) for name,
147 147 commit_id in self.repository.tags.iteritems()
148 148 if commit_id == self.raw_id]
149 149 return tags
150 150
151 151 @LazyProperty
152 152 def commit_branches(self):
153 153 branches = []
154 154 for name, commit_id in self.repository.branches.iteritems():
155 155 if commit_id == self.raw_id:
156 156 branches.append(name)
157 157 return branches
158 158
159 def _set_branch(self, branches):
160 if branches:
161 # actually commit can have multiple branches in git
162 return safe_unicode(branches[0])
163
159 164 @LazyProperty
160 165 def branch(self):
161 166 branches = self._remote.branch(self.raw_id)
162
163 if branches:
164 # actually commit can have multiple branches in git
165 return safe_unicode(branches[0])
167 return self._set_branch(branches)
166 168
167 169 def _get_tree_id_for_path(self, path):
168 170 path = safe_str(path)
169 171 if path in self._paths:
170 172 return self._paths[path]
171 173
172 174 tree_id = self._tree_id
173 175
174 176 path = path.strip('/')
175 177 if path == '':
176 178 data = [tree_id, "tree"]
177 179 self._paths[''] = data
178 180 return data
179 181
180 182 tree_id, tree_type, tree_mode = \
181 183 self._remote.tree_and_type_for_path(self.raw_id, path)
182 184 if tree_id is None:
183 185 raise self.no_node_at_path(path)
184 186
185 187 self._paths[path] = [tree_id, tree_type]
186 188 self._stat_modes[path] = tree_mode
187 189
188 190 if path not in self._paths:
189 191 raise self.no_node_at_path(path)
190 192
191 193 return self._paths[path]
192 194
193 195 def _get_kind(self, path):
194 196 tree_id, type_ = self._get_tree_id_for_path(path)
195 197 if type_ == 'blob':
196 198 return NodeKind.FILE
197 199 elif type_ == 'tree':
198 200 return NodeKind.DIR
199 201 elif type_ == 'link':
200 202 return NodeKind.SUBMODULE
201 203 return None
202 204
203 205 def _get_filectx(self, path):
204 206 path = self._fix_path(path)
205 207 if self._get_kind(path) != NodeKind.FILE:
206 208 raise CommitError(
207 209 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
208 210 return path
209 211
210 212 def _get_file_nodes(self):
211 213 return chain(*(t[2] for t in self.walk()))
212 214
213 215 @LazyProperty
214 216 def parents(self):
215 217 """
216 218 Returns list of parent commits.
217 219 """
218 220 parent_ids = self._remote.parents(self.id)
219 221 return self._make_commits(parent_ids)
220 222
221 223 @LazyProperty
222 224 def children(self):
223 225 """
224 226 Returns list of child commits.
225 227 """
226 228
227 229 children = self._remote.children(self.raw_id)
228 230 return self._make_commits(children)
229 231
230 232 def _make_commits(self, commit_ids):
231 233 def commit_maker(_commit_id):
232 234 return self.repository.get_commit(commit_id=commit_id)
233 235
234 236 return [commit_maker(commit_id) for commit_id in commit_ids]
235 237
236 238 def get_file_mode(self, path):
237 239 """
238 240 Returns stat mode of the file at the given `path`.
239 241 """
240 242 path = safe_str(path)
241 243 # ensure path is traversed
242 244 self._get_tree_id_for_path(path)
243 245 return self._stat_modes[path]
244 246
245 247 def is_link(self, path):
246 248 return stat.S_ISLNK(self.get_file_mode(path))
247 249
248 250 def is_node_binary(self, path):
249 251 tree_id, _ = self._get_tree_id_for_path(path)
250 252 return self._remote.is_binary(tree_id)
251 253
252 254 def get_file_content(self, path):
253 255 """
254 256 Returns content of the file at given `path`.
255 257 """
256 258 tree_id, _ = self._get_tree_id_for_path(path)
257 259 return self._remote.blob_as_pretty_string(tree_id)
258 260
259 261 def get_file_content_streamed(self, path):
260 262 tree_id, _ = self._get_tree_id_for_path(path)
261 263 stream_method = getattr(self._remote, 'stream:blob_as_pretty_string')
262 264 return stream_method(tree_id)
263 265
264 266 def get_file_size(self, path):
265 267 """
266 268 Returns size of the file at given `path`.
267 269 """
268 270 tree_id, _ = self._get_tree_id_for_path(path)
269 271 return self._remote.blob_raw_length(tree_id)
270 272
271 273 def get_path_history(self, path, limit=None, pre_load=None):
272 274 """
273 275 Returns history of file as reversed list of `GitCommit` objects for
274 276 which file at given `path` has been modified.
275 277 """
276 278
277 279 path = self._get_filectx(path)
278 280 hist = self._remote.node_history(self.raw_id, path, limit)
279 281 return [
280 282 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
281 283 for commit_id in hist]
282 284
283 285 def get_file_annotate(self, path, pre_load=None):
284 286 """
285 287 Returns a generator of four element tuples with
286 288 lineno, commit_id, commit lazy loader and line
287 289 """
288 290
289 291 result = self._remote.node_annotate(self.raw_id, path)
290 292
291 293 for ln_no, commit_id, content in result:
292 294 yield (
293 295 ln_no, commit_id,
294 296 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
295 297 content)
296 298
297 299 def get_nodes(self, path):
298 300
299 301 if self._get_kind(path) != NodeKind.DIR:
300 302 raise CommitError(
301 303 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
302 304 path = self._fix_path(path)
303 305
304 306 tree_id, _ = self._get_tree_id_for_path(path)
305 307
306 308 dirnodes = []
307 309 filenodes = []
308 310
309 311 # extracted tree ID gives us our files...
310 312 bytes_path = safe_str(path) # libgit operates on bytes
311 313 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
312 314 if type_ == 'link':
313 315 url = self._get_submodule_url('/'.join((bytes_path, name)))
314 316 dirnodes.append(SubModuleNode(
315 317 name, url=url, commit=id_, alias=self.repository.alias))
316 318 continue
317 319
318 320 if bytes_path != '':
319 321 obj_path = '/'.join((bytes_path, name))
320 322 else:
321 323 obj_path = name
322 324 if obj_path not in self._stat_modes:
323 325 self._stat_modes[obj_path] = stat_
324 326
325 327 if type_ == 'tree':
326 328 dirnodes.append(DirNode(obj_path, commit=self))
327 329 elif type_ == 'blob':
328 330 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
329 331 else:
330 332 raise CommitError(
331 333 "Requested object should be Tree or Blob, is %s", type_)
332 334
333 335 nodes = dirnodes + filenodes
334 336 for node in nodes:
335 337 if node.path not in self.nodes:
336 338 self.nodes[node.path] = node
337 339 nodes.sort()
338 340 return nodes
339 341
340 342 def get_node(self, path, pre_load=None):
341 343 if isinstance(path, unicode):
342 344 path = path.encode('utf-8')
343 345 path = self._fix_path(path)
344 346 if path not in self.nodes:
345 347 try:
346 348 tree_id, type_ = self._get_tree_id_for_path(path)
347 349 except CommitError:
348 350 raise NodeDoesNotExistError(
349 351 "Cannot find one of parents' directories for a given "
350 352 "path: %s" % path)
351 353
352 354 if type_ in ['link', 'commit']:
353 355 url = self._get_submodule_url(path)
354 356 node = SubModuleNode(path, url=url, commit=tree_id,
355 357 alias=self.repository.alias)
356 358 elif type_ == 'tree':
357 359 if path == '':
358 360 node = RootNode(commit=self)
359 361 else:
360 362 node = DirNode(path, commit=self)
361 363 elif type_ == 'blob':
362 364 node = FileNode(path, commit=self, pre_load=pre_load)
363 365 self._stat_modes[path] = node.mode
364 366 else:
365 367 raise self.no_node_at_path(path)
366 368
367 369 # cache node
368 370 self.nodes[path] = node
369 371
370 372 return self.nodes[path]
371 373
372 374 def get_largefile_node(self, path):
373 375 tree_id, _ = self._get_tree_id_for_path(path)
374 376 pointer_spec = self._remote.is_large_file(tree_id)
375 377
376 378 if pointer_spec:
377 379 # content of that file regular FileNode is the hash of largefile
378 380 file_id = pointer_spec.get('oid_hash')
379 381 if self._remote.in_largefiles_store(file_id):
380 382 lf_path = self._remote.store_path(file_id)
381 383 return LargeFileNode(lf_path, commit=self, org_path=path)
382 384
383 385 @LazyProperty
384 386 def affected_files(self):
385 387 """
386 388 Gets a fast accessible file changes for given commit
387 389 """
388 390 added, modified, deleted = self._changes_cache
389 391 return list(added.union(modified).union(deleted))
390 392
391 393 @LazyProperty
392 394 def _changes_cache(self):
393 395 added = set()
394 396 modified = set()
395 397 deleted = set()
396 398 _r = self._remote
397 399
398 400 parents = self.parents
399 401 if not self.parents:
400 402 parents = [base.EmptyCommit()]
401 403 for parent in parents:
402 404 if isinstance(parent, base.EmptyCommit):
403 405 oid = None
404 406 else:
405 407 oid = parent.raw_id
406 408 changes = _r.tree_changes(oid, self.raw_id)
407 409 for (oldpath, newpath), (_, _), (_, _) in changes:
408 410 if newpath and oldpath:
409 411 modified.add(newpath)
410 412 elif newpath and not oldpath:
411 413 added.add(newpath)
412 414 elif not newpath and oldpath:
413 415 deleted.add(oldpath)
414 416 return added, modified, deleted
415 417
416 418 def _get_paths_for_status(self, status):
417 419 """
418 420 Returns sorted list of paths for given ``status``.
419 421
420 422 :param status: one of: *added*, *modified* or *deleted*
421 423 """
422 424 added, modified, deleted = self._changes_cache
423 425 return sorted({
424 426 'added': list(added),
425 427 'modified': list(modified),
426 428 'deleted': list(deleted)}[status]
427 429 )
428 430
429 431 @LazyProperty
430 432 def added(self):
431 433 """
432 434 Returns list of added ``FileNode`` objects.
433 435 """
434 436 if not self.parents:
435 437 return list(self._get_file_nodes())
436 438 return AddedFileNodesGenerator(self.added_paths, self)
437 439
438 440 @LazyProperty
439 441 def added_paths(self):
440 442 return [n for n in self._get_paths_for_status('added')]
441 443
442 444 @LazyProperty
443 445 def changed(self):
444 446 """
445 447 Returns list of modified ``FileNode`` objects.
446 448 """
447 449 if not self.parents:
448 450 return []
449 451 return ChangedFileNodesGenerator(self.changed_paths, self)
450 452
451 453 @LazyProperty
452 454 def changed_paths(self):
453 455 return [n for n in self._get_paths_for_status('modified')]
454 456
455 457 @LazyProperty
456 458 def removed(self):
457 459 """
458 460 Returns list of removed ``FileNode`` objects.
459 461 """
460 462 if not self.parents:
461 463 return []
462 464 return RemovedFileNodesGenerator(self.removed_paths, self)
463 465
464 466 @LazyProperty
465 467 def removed_paths(self):
466 468 return [n for n in self._get_paths_for_status('deleted')]
467 469
468 470 def _get_submodule_url(self, submodule_path):
469 471 git_modules_path = '.gitmodules'
470 472
471 473 if self._submodules is None:
472 474 self._submodules = {}
473 475
474 476 try:
475 477 submodules_node = self.get_node(git_modules_path)
476 478 except NodeDoesNotExistError:
477 479 return None
478 480
479 481 # ConfigParser fails if there are whitespaces, also it needs an iterable
480 482 # file like content
481 483 def iter_content(_content):
482 484 for line in _content.splitlines():
483 485 yield line
484 486
485 487 parser = configparser.RawConfigParser()
486 488 parser.read_file(iter_content(submodules_node.content))
487 489
488 490 for section in parser.sections():
489 491 path = parser.get(section, 'path')
490 492 url = parser.get(section, 'url')
491 493 if path and url:
492 494 self._submodules[path.strip('/')] = url
493 495
494 496 return self._submodules.get(submodule_path.strip('/'))
@@ -1,1034 +1,1051 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46 46
47 47
48 48 SHA_PATTERN = re.compile(r'^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$')
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'master'
58 58
59 59 contact = BaseRepository.DEFAULT_CONTACT
60 60
61 61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 62 do_workspace_checkout=False, with_wire=None, bare=False):
63 63
64 64 self.path = safe_str(os.path.abspath(repo_path))
65 65 self.config = config if config else self.get_default_config()
66 66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67 67
68 68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 69
70 70 # caches
71 71 self._commit_ids = {}
72 72
73 73 @LazyProperty
74 74 def _remote(self):
75 75 repo_id = self.path
76 76 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 77
78 78 @LazyProperty
79 79 def bare(self):
80 80 return self._remote.bare()
81 81
82 82 @LazyProperty
83 83 def head(self):
84 84 return self._remote.head()
85 85
86 86 @CachedProperty
87 87 def commit_ids(self):
88 88 """
89 89 Returns list of commit ids, in ascending order. Being lazy
90 90 attribute allows external tools to inject commit ids from cache.
91 91 """
92 92 commit_ids = self._get_all_commit_ids()
93 93 self._rebuild_cache(commit_ids)
94 94 return commit_ids
95 95
96 96 def _rebuild_cache(self, commit_ids):
97 97 self._commit_ids = dict((commit_id, index)
98 98 for index, commit_id in enumerate(commit_ids))
99 99
100 100 def run_git_command(self, cmd, **opts):
101 101 """
102 102 Runs given ``cmd`` as git command and returns tuple
103 103 (stdout, stderr).
104 104
105 105 :param cmd: git command to be executed
106 106 :param opts: env options to pass into Subprocess command
107 107 """
108 108 if not isinstance(cmd, list):
109 109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110 110
111 111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 112 out, err = self._remote.run_git_command(cmd, **opts)
113 113 if err and not skip_stderr_log:
114 114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 115 return out, err
116 116
117 117 @staticmethod
118 118 def check_url(url, config):
119 119 """
120 120 Function will check given url and try to verify if it's a valid
121 121 link. Sometimes it may happened that git will issue basic
122 122 auth request that can cause whole API to hang when used from python
123 123 or other external calls.
124 124
125 125 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 126 when the return code is non 200
127 127 """
128 128 # check first if it's not an url
129 129 if os.path.isdir(url) or url.startswith('file:'):
130 130 return True
131 131
132 132 if '+' in url.split('://', 1)[0]:
133 133 url = url.split('+', 1)[1]
134 134
135 135 # Request the _remote to verify the url
136 136 return connection.Git.check_url(url, config.serialize())
137 137
138 138 @staticmethod
139 139 def is_valid_repository(path):
140 140 if os.path.isdir(os.path.join(path, '.git')):
141 141 return True
142 142 # check case of bare repository
143 143 try:
144 144 GitRepository(path)
145 145 return True
146 146 except VCSError:
147 147 pass
148 148 return False
149 149
150 150 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 151 bare=False):
152 152 if create and os.path.exists(self.path):
153 153 raise RepositoryError(
154 154 "Cannot create repository at %s, location already exist"
155 155 % self.path)
156 156
157 157 if bare and do_workspace_checkout:
158 158 raise RepositoryError("Cannot update a bare repository")
159 159 try:
160 160
161 161 if src_url:
162 162 # check URL before any actions
163 163 GitRepository.check_url(src_url, self.config)
164 164
165 165 if create:
166 166 os.makedirs(self.path, mode=0o755)
167 167
168 168 if bare:
169 169 self._remote.init_bare()
170 170 else:
171 171 self._remote.init()
172 172
173 173 if src_url and bare:
174 174 # bare repository only allows a fetch and checkout is not allowed
175 175 self.fetch(src_url, commit_ids=None)
176 176 elif src_url:
177 177 self.pull(src_url, commit_ids=None,
178 178 update_after=do_workspace_checkout)
179 179
180 180 else:
181 181 if not self._remote.assert_correct_path():
182 182 raise RepositoryError(
183 183 'Path "%s" does not contain a Git repository' %
184 184 (self.path,))
185 185
186 186 # TODO: johbo: check if we have to translate the OSError here
187 187 except OSError as err:
188 188 raise RepositoryError(err)
189 189
190 190 def _get_all_commit_ids(self):
191 191 return self._remote.get_all_commit_ids()
192 192
193 193 def _get_commit_ids(self, filters=None):
194 194 # we must check if this repo is not empty, since later command
195 195 # fails if it is. And it's cheaper to ask than throw the subprocess
196 196 # errors
197 197
198 198 head = self._remote.head(show_exc=False)
199 199
200 200 if not head:
201 201 return []
202 202
203 203 rev_filter = ['--branches', '--tags']
204 204 extra_filter = []
205 205
206 206 if filters:
207 207 if filters.get('since'):
208 208 extra_filter.append('--since=%s' % (filters['since']))
209 209 if filters.get('until'):
210 210 extra_filter.append('--until=%s' % (filters['until']))
211 211 if filters.get('branch_name'):
212 212 rev_filter = []
213 213 extra_filter.append(filters['branch_name'])
214 214 rev_filter.extend(extra_filter)
215 215
216 216 # if filters.get('start') or filters.get('end'):
217 217 # # skip is offset, max-count is limit
218 218 # if filters.get('start'):
219 219 # extra_filter += ' --skip=%s' % filters['start']
220 220 # if filters.get('end'):
221 221 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222 222
223 223 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 224 try:
225 225 output, __ = self.run_git_command(cmd)
226 226 except RepositoryError:
227 227 # Can be raised for empty repositories
228 228 return []
229 229 return output.splitlines()
230 230
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False):
231 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
232
232 233 def is_null(value):
233 234 return len(value) == commit_id_or_idx.count('0')
234 235
235 236 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 237 return self.commit_ids[-1]
237 238
238 239 commit_missing_err = "Commit {} does not exist for `{}`".format(
239 240 *map(safe_str, [commit_id_or_idx, self.name]))
240 241
241 242 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
242 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
243 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
244 try:
245 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
246 except Exception:
247 raise CommitDoesNotExistError(commit_missing_err)
243 is_branch = reference_obj and reference_obj.branch
248 244
249 elif is_bstr:
250 # Need to call remote to translate id for tagging scenario
245 lookup_ok = False
246 if is_bstr:
247 # Need to call remote to translate id for tagging scenarios,
248 # or branch that are numeric
251 249 try:
252 250 remote_data = self._remote.get_object(commit_id_or_idx,
253 251 maybe_unreachable=maybe_unreachable)
254 252 commit_id_or_idx = remote_data["commit_id"]
253 lookup_ok = True
255 254 except (CommitDoesNotExistError,):
256 raise CommitDoesNotExistError(commit_missing_err)
255 lookup_ok = False
256
257 if lookup_ok is False:
258 is_numeric_idx = \
259 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
260 or isinstance(commit_id_or_idx, int)
261 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
262 try:
263 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
264 lookup_ok = True
265 except Exception:
266 raise CommitDoesNotExistError(commit_missing_err)
267
268 # we failed regular lookup, and by integer number lookup
269 if lookup_ok is False:
270 raise CommitDoesNotExistError(commit_missing_err)
257 271
258 272 # Ensure we return full id
259 273 if not SHA_PATTERN.match(str(commit_id_or_idx)):
260 274 raise CommitDoesNotExistError(
261 275 "Given commit id %s not recognized" % commit_id_or_idx)
262 276 return commit_id_or_idx
263 277
264 278 def get_hook_location(self):
265 279 """
266 280 returns absolute path to location where hooks are stored
267 281 """
268 282 loc = os.path.join(self.path, 'hooks')
269 283 if not self.bare:
270 284 loc = os.path.join(self.path, '.git', 'hooks')
271 285 return loc
272 286
273 287 @LazyProperty
274 288 def last_change(self):
275 289 """
276 290 Returns last change made on this repository as
277 291 `datetime.datetime` object.
278 292 """
279 293 try:
280 294 return self.get_commit().date
281 295 except RepositoryError:
282 296 tzoffset = makedate()[1]
283 297 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
284 298
285 299 def _get_fs_mtime(self):
286 300 idx_loc = '' if self.bare else '.git'
287 301 # fallback to filesystem
288 302 in_path = os.path.join(self.path, idx_loc, "index")
289 303 he_path = os.path.join(self.path, idx_loc, "HEAD")
290 304 if os.path.exists(in_path):
291 305 return os.stat(in_path).st_mtime
292 306 else:
293 307 return os.stat(he_path).st_mtime
294 308
295 309 @LazyProperty
296 310 def description(self):
297 311 description = self._remote.get_description()
298 312 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
299 313
300 314 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
301 315 if self.is_empty():
302 316 return OrderedDict()
303 317
304 318 result = []
305 319 for ref, sha in self._refs.iteritems():
306 320 if ref.startswith(prefix):
307 321 ref_name = ref
308 322 if strip_prefix:
309 323 ref_name = ref[len(prefix):]
310 324 result.append((safe_unicode(ref_name), sha))
311 325
312 326 def get_name(entry):
313 327 return entry[0]
314 328
315 329 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
316 330
317 331 def _get_branches(self):
318 332 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
319 333
320 334 @CachedProperty
321 335 def branches(self):
322 336 return self._get_branches()
323 337
324 338 @CachedProperty
325 339 def branches_closed(self):
326 340 return {}
327 341
328 342 @CachedProperty
329 343 def bookmarks(self):
330 344 return {}
331 345
332 346 @CachedProperty
333 347 def branches_all(self):
334 348 all_branches = {}
335 349 all_branches.update(self.branches)
336 350 all_branches.update(self.branches_closed)
337 351 return all_branches
338 352
339 353 @CachedProperty
340 354 def tags(self):
341 355 return self._get_tags()
342 356
343 357 def _get_tags(self):
344 358 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
345 359
346 360 def tag(self, name, user, commit_id=None, message=None, date=None,
347 361 **kwargs):
348 362 # TODO: fix this method to apply annotated tags correct with message
349 363 """
350 364 Creates and returns a tag for the given ``commit_id``.
351 365
352 366 :param name: name for new tag
353 367 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
354 368 :param commit_id: commit id for which new tag would be created
355 369 :param message: message of the tag's commit
356 370 :param date: date of tag's commit
357 371
358 372 :raises TagAlreadyExistError: if tag with same name already exists
359 373 """
360 374 if name in self.tags:
361 375 raise TagAlreadyExistError("Tag %s already exists" % name)
362 376 commit = self.get_commit(commit_id=commit_id)
363 377 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
364 378
365 379 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
366 380
367 381 self._invalidate_prop_cache('tags')
368 382 self._invalidate_prop_cache('_refs')
369 383
370 384 return commit
371 385
372 386 def remove_tag(self, name, user, message=None, date=None):
373 387 """
374 388 Removes tag with the given ``name``.
375 389
376 390 :param name: name of the tag to be removed
377 391 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
378 392 :param message: message of the tag's removal commit
379 393 :param date: date of tag's removal commit
380 394
381 395 :raises TagDoesNotExistError: if tag with given name does not exists
382 396 """
383 397 if name not in self.tags:
384 398 raise TagDoesNotExistError("Tag %s does not exist" % name)
385 399
386 400 self._remote.tag_remove(name)
387 401 self._invalidate_prop_cache('tags')
388 402 self._invalidate_prop_cache('_refs')
389 403
390 404 def _get_refs(self):
391 405 return self._remote.get_refs()
392 406
393 407 @CachedProperty
394 408 def _refs(self):
395 409 return self._get_refs()
396 410
397 411 @property
398 412 def _ref_tree(self):
399 413 node = tree = {}
400 414 for ref, sha in self._refs.iteritems():
401 415 path = ref.split('/')
402 416 for bit in path[:-1]:
403 417 node = node.setdefault(bit, {})
404 418 node[path[-1]] = sha
405 419 node = tree
406 420 return tree
407 421
408 422 def get_remote_ref(self, ref_name):
409 423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
410 424 try:
411 425 return self._refs[ref_key]
412 426 except Exception:
413 427 return
414 428
415 429 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
416 translate_tag=True, maybe_unreachable=False):
430 translate_tag=True, maybe_unreachable=False, reference_obj=None):
417 431 """
418 432 Returns `GitCommit` object representing commit from git repository
419 433 at the given `commit_id` or head (most recent commit) if None given.
420 434 """
435
421 436 if self.is_empty():
422 437 raise EmptyRepositoryError("There are no commits yet")
423 438
424 439 if commit_id is not None:
425 440 self._validate_commit_id(commit_id)
426 441 try:
427 442 # we have cached idx, use it without contacting the remote
428 443 idx = self._commit_ids[commit_id]
429 444 return GitCommit(self, commit_id, idx, pre_load=pre_load)
430 445 except KeyError:
431 446 pass
432 447
433 448 elif commit_idx is not None:
434 449 self._validate_commit_idx(commit_idx)
435 450 try:
436 451 _commit_id = self.commit_ids[commit_idx]
437 452 if commit_idx < 0:
438 453 commit_idx = self.commit_ids.index(_commit_id)
439 454 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
440 455 except IndexError:
441 456 commit_id = commit_idx
442 457 else:
443 458 commit_id = "tip"
444 459
445 460 if translate_tag:
446 commit_id = self._lookup_commit(commit_id, maybe_unreachable=maybe_unreachable)
461 commit_id = self._lookup_commit(
462 commit_id, maybe_unreachable=maybe_unreachable,
463 reference_obj=reference_obj)
447 464
448 465 try:
449 466 idx = self._commit_ids[commit_id]
450 467 except KeyError:
451 468 idx = -1
452 469
453 470 return GitCommit(self, commit_id, idx, pre_load=pre_load)
454 471
455 472 def get_commits(
456 473 self, start_id=None, end_id=None, start_date=None, end_date=None,
457 474 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
458 475 """
459 476 Returns generator of `GitCommit` objects from start to end (both
460 477 are inclusive), in ascending date order.
461 478
462 479 :param start_id: None, str(commit_id)
463 480 :param end_id: None, str(commit_id)
464 481 :param start_date: if specified, commits with commit date less than
465 482 ``start_date`` would be filtered out from returned set
466 483 :param end_date: if specified, commits with commit date greater than
467 484 ``end_date`` would be filtered out from returned set
468 485 :param branch_name: if specified, commits not reachable from given
469 486 branch would be filtered out from returned set
470 487 :param show_hidden: Show hidden commits such as obsolete or hidden from
471 488 Mercurial evolve
472 489 :raise BranchDoesNotExistError: If given `branch_name` does not
473 490 exist.
474 491 :raise CommitDoesNotExistError: If commits for given `start` or
475 492 `end` could not be found.
476 493
477 494 """
478 495 if self.is_empty():
479 496 raise EmptyRepositoryError("There are no commits yet")
480 497
481 498 self._validate_branch_name(branch_name)
482 499
483 500 if start_id is not None:
484 501 self._validate_commit_id(start_id)
485 502 if end_id is not None:
486 503 self._validate_commit_id(end_id)
487 504
488 505 start_raw_id = self._lookup_commit(start_id)
489 506 start_pos = self._commit_ids[start_raw_id] if start_id else None
490 507 end_raw_id = self._lookup_commit(end_id)
491 508 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
492 509
493 510 if None not in [start_id, end_id] and start_pos > end_pos:
494 511 raise RepositoryError(
495 512 "Start commit '%s' cannot be after end commit '%s'" %
496 513 (start_id, end_id))
497 514
498 515 if end_pos is not None:
499 516 end_pos += 1
500 517
501 518 filter_ = []
502 519 if branch_name:
503 520 filter_.append({'branch_name': branch_name})
504 521 if start_date and not end_date:
505 522 filter_.append({'since': start_date})
506 523 if end_date and not start_date:
507 524 filter_.append({'until': end_date})
508 525 if start_date and end_date:
509 526 filter_.append({'since': start_date})
510 527 filter_.append({'until': end_date})
511 528
512 529 # if start_pos or end_pos:
513 530 # filter_.append({'start': start_pos})
514 531 # filter_.append({'end': end_pos})
515 532
516 533 if filter_:
517 534 revfilters = {
518 535 'branch_name': branch_name,
519 536 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
520 537 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
521 538 'start': start_pos,
522 539 'end': end_pos,
523 540 }
524 541 commit_ids = self._get_commit_ids(filters=revfilters)
525 542
526 543 else:
527 544 commit_ids = self.commit_ids
528 545
529 546 if start_pos or end_pos:
530 547 commit_ids = commit_ids[start_pos: end_pos]
531 548
532 549 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
533 550 translate_tag=translate_tags)
534 551
535 552 def get_diff(
536 553 self, commit1, commit2, path='', ignore_whitespace=False,
537 554 context=3, path1=None):
538 555 """
539 556 Returns (git like) *diff*, as plain text. Shows changes introduced by
540 557 ``commit2`` since ``commit1``.
541 558
542 559 :param commit1: Entry point from which diff is shown. Can be
543 560 ``self.EMPTY_COMMIT`` - in this case, patch showing all
544 561 the changes since empty state of the repository until ``commit2``
545 562 :param commit2: Until which commits changes should be shown.
546 563 :param ignore_whitespace: If set to ``True``, would not show whitespace
547 564 changes. Defaults to ``False``.
548 565 :param context: How many lines before/after changed lines should be
549 566 shown. Defaults to ``3``.
550 567 """
551 568 self._validate_diff_commits(commit1, commit2)
552 569 if path1 is not None and path1 != path:
553 570 raise ValueError("Diff of two different paths not supported.")
554 571
555 572 if path:
556 573 file_filter = path
557 574 else:
558 575 file_filter = None
559 576
560 577 diff = self._remote.diff(
561 578 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
562 579 opt_ignorews=ignore_whitespace,
563 580 context=context)
564 581 return GitDiff(diff)
565 582
566 583 def strip(self, commit_id, branch_name):
567 584 commit = self.get_commit(commit_id=commit_id)
568 585 if commit.merge:
569 586 raise Exception('Cannot reset to merge commit')
570 587
571 588 # parent is going to be the new head now
572 589 commit = commit.parents[0]
573 590 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
574 591
575 592 # clear cached properties
576 593 self._invalidate_prop_cache('commit_ids')
577 594 self._invalidate_prop_cache('_refs')
578 595 self._invalidate_prop_cache('branches')
579 596
580 597 return len(self.commit_ids)
581 598
582 599 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
583 600 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
584 601 self, commit_id1, repo2, commit_id2)
585 602
586 603 if commit_id1 == commit_id2:
587 604 return commit_id1
588 605
589 606 if self != repo2:
590 607 commits = self._remote.get_missing_revs(
591 608 commit_id1, commit_id2, repo2.path)
592 609 if commits:
593 610 commit = repo2.get_commit(commits[-1])
594 611 if commit.parents:
595 612 ancestor_id = commit.parents[0].raw_id
596 613 else:
597 614 ancestor_id = None
598 615 else:
599 616 # no commits from other repo, ancestor_id is the commit_id2
600 617 ancestor_id = commit_id2
601 618 else:
602 619 output, __ = self.run_git_command(
603 620 ['merge-base', commit_id1, commit_id2])
604 621 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
605 622
606 623 log.debug('Found common ancestor with sha: %s', ancestor_id)
607 624
608 625 return ancestor_id
609 626
610 627 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
611 628 repo1 = self
612 629 ancestor_id = None
613 630
614 631 if commit_id1 == commit_id2:
615 632 commits = []
616 633 elif repo1 != repo2:
617 634 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
618 635 repo2.path)
619 636 commits = [
620 637 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
621 638 for commit_id in reversed(missing_ids)]
622 639 else:
623 640 output, __ = repo1.run_git_command(
624 641 ['log', '--reverse', '--pretty=format: %H', '-s',
625 642 '%s..%s' % (commit_id1, commit_id2)])
626 643 commits = [
627 644 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
628 645 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
629 646
630 647 return commits
631 648
632 649 @LazyProperty
633 650 def in_memory_commit(self):
634 651 """
635 652 Returns ``GitInMemoryCommit`` object for this repository.
636 653 """
637 654 return GitInMemoryCommit(self)
638 655
639 656 def pull(self, url, commit_ids=None, update_after=False):
640 657 """
641 658 Pull changes from external location. Pull is different in GIT
642 659 that fetch since it's doing a checkout
643 660
644 661 :param commit_ids: Optional. Can be set to a list of commit ids
645 662 which shall be pulled from the other repository.
646 663 """
647 664 refs = None
648 665 if commit_ids is not None:
649 666 remote_refs = self._remote.get_remote_refs(url)
650 667 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
651 668 self._remote.pull(url, refs=refs, update_after=update_after)
652 669 self._remote.invalidate_vcs_cache()
653 670
654 671 def fetch(self, url, commit_ids=None):
655 672 """
656 673 Fetch all git objects from external location.
657 674 """
658 675 self._remote.sync_fetch(url, refs=commit_ids)
659 676 self._remote.invalidate_vcs_cache()
660 677
661 678 def push(self, url):
662 679 refs = None
663 680 self._remote.sync_push(url, refs=refs)
664 681
665 682 def set_refs(self, ref_name, commit_id):
666 683 self._remote.set_refs(ref_name, commit_id)
667 684 self._invalidate_prop_cache('_refs')
668 685
669 686 def remove_ref(self, ref_name):
670 687 self._remote.remove_ref(ref_name)
671 688 self._invalidate_prop_cache('_refs')
672 689
673 690 def run_gc(self, prune=True):
674 691 cmd = ['gc', '--aggressive']
675 692 if prune:
676 693 cmd += ['--prune=now']
677 694 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
678 695 return stderr
679 696
680 697 def _update_server_info(self):
681 698 """
682 699 runs gits update-server-info command in this repo instance
683 700 """
684 701 self._remote.update_server_info()
685 702
686 703 def _current_branch(self):
687 704 """
688 705 Return the name of the current branch.
689 706
690 707 It only works for non bare repositories (i.e. repositories with a
691 708 working copy)
692 709 """
693 710 if self.bare:
694 711 raise RepositoryError('Bare git repos do not have active branches')
695 712
696 713 if self.is_empty():
697 714 return None
698 715
699 716 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
700 717 return stdout.strip()
701 718
702 719 def _checkout(self, branch_name, create=False, force=False):
703 720 """
704 721 Checkout a branch in the working directory.
705 722
706 723 It tries to create the branch if create is True, failing if the branch
707 724 already exists.
708 725
709 726 It only works for non bare repositories (i.e. repositories with a
710 727 working copy)
711 728 """
712 729 if self.bare:
713 730 raise RepositoryError('Cannot checkout branches in a bare git repo')
714 731
715 732 cmd = ['checkout']
716 733 if force:
717 734 cmd.append('-f')
718 735 if create:
719 736 cmd.append('-b')
720 737 cmd.append(branch_name)
721 738 self.run_git_command(cmd, fail_on_stderr=False)
722 739
723 740 def _create_branch(self, branch_name, commit_id):
724 741 """
725 742 creates a branch in a GIT repo
726 743 """
727 744 self._remote.create_branch(branch_name, commit_id)
728 745
729 746 def _identify(self):
730 747 """
731 748 Return the current state of the working directory.
732 749 """
733 750 if self.bare:
734 751 raise RepositoryError('Bare git repos do not have active branches')
735 752
736 753 if self.is_empty():
737 754 return None
738 755
739 756 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
740 757 return stdout.strip()
741 758
742 759 def _local_clone(self, clone_path, branch_name, source_branch=None):
743 760 """
744 761 Create a local clone of the current repo.
745 762 """
746 763 # N.B.(skreft): the --branch option is required as otherwise the shallow
747 764 # clone will only fetch the active branch.
748 765 cmd = ['clone', '--branch', branch_name,
749 766 self.path, os.path.abspath(clone_path)]
750 767
751 768 self.run_git_command(cmd, fail_on_stderr=False)
752 769
753 770 # if we get the different source branch, make sure we also fetch it for
754 771 # merge conditions
755 772 if source_branch and source_branch != branch_name:
756 773 # check if the ref exists.
757 774 shadow_repo = GitRepository(os.path.abspath(clone_path))
758 775 if shadow_repo.get_remote_ref(source_branch):
759 776 cmd = ['fetch', self.path, source_branch]
760 777 self.run_git_command(cmd, fail_on_stderr=False)
761 778
762 779 def _local_fetch(self, repository_path, branch_name, use_origin=False):
763 780 """
764 781 Fetch a branch from a local repository.
765 782 """
766 783 repository_path = os.path.abspath(repository_path)
767 784 if repository_path == self.path:
768 785 raise ValueError('Cannot fetch from the same repository')
769 786
770 787 if use_origin:
771 788 branch_name = '+{branch}:refs/heads/{branch}'.format(
772 789 branch=branch_name)
773 790
774 791 cmd = ['fetch', '--no-tags', '--update-head-ok',
775 792 repository_path, branch_name]
776 793 self.run_git_command(cmd, fail_on_stderr=False)
777 794
778 795 def _local_reset(self, branch_name):
779 796 branch_name = '{}'.format(branch_name)
780 797 cmd = ['reset', '--hard', branch_name, '--']
781 798 self.run_git_command(cmd, fail_on_stderr=False)
782 799
783 800 def _last_fetch_heads(self):
784 801 """
785 802 Return the last fetched heads that need merging.
786 803
787 804 The algorithm is defined at
788 805 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
789 806 """
790 807 if not self.bare:
791 808 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
792 809 else:
793 810 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
794 811
795 812 heads = []
796 813 with open(fetch_heads_path) as f:
797 814 for line in f:
798 815 if ' not-for-merge ' in line:
799 816 continue
800 817 line = re.sub('\t.*', '', line, flags=re.DOTALL)
801 818 heads.append(line)
802 819
803 820 return heads
804 821
805 822 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
806 823 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
807 824
808 825 def _local_pull(self, repository_path, branch_name, ff_only=True):
809 826 """
810 827 Pull a branch from a local repository.
811 828 """
812 829 if self.bare:
813 830 raise RepositoryError('Cannot pull into a bare git repository')
814 831 # N.B.(skreft): The --ff-only option is to make sure this is a
815 832 # fast-forward (i.e., we are only pulling new changes and there are no
816 833 # conflicts with our current branch)
817 834 # Additionally, that option needs to go before --no-tags, otherwise git
818 835 # pull complains about it being an unknown flag.
819 836 cmd = ['pull']
820 837 if ff_only:
821 838 cmd.append('--ff-only')
822 839 cmd.extend(['--no-tags', repository_path, branch_name])
823 840 self.run_git_command(cmd, fail_on_stderr=False)
824 841
825 842 def _local_merge(self, merge_message, user_name, user_email, heads):
826 843 """
827 844 Merge the given head into the checked out branch.
828 845
829 846 It will force a merge commit.
830 847
831 848 Currently it raises an error if the repo is empty, as it is not possible
832 849 to create a merge commit in an empty repo.
833 850
834 851 :param merge_message: The message to use for the merge commit.
835 852 :param heads: the heads to merge.
836 853 """
837 854 if self.bare:
838 855 raise RepositoryError('Cannot merge into a bare git repository')
839 856
840 857 if not heads:
841 858 return
842 859
843 860 if self.is_empty():
844 861 # TODO(skreft): do something more robust in this case.
845 862 raise RepositoryError('Do not know how to merge into empty repositories yet')
846 863 unresolved = None
847 864
848 865 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
849 866 # commit message. We also specify the user who is doing the merge.
850 867 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
851 868 '-c', 'user.email=%s' % safe_str(user_email),
852 869 'merge', '--no-ff', '-m', safe_str(merge_message)]
853 870
854 871 merge_cmd = cmd + heads
855 872
856 873 try:
857 874 self.run_git_command(merge_cmd, fail_on_stderr=False)
858 875 except RepositoryError:
859 876 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
860 877 fail_on_stderr=False)[0].splitlines()
861 878 # NOTE(marcink): we add U notation for consistent with HG backend output
862 879 unresolved = ['U {}'.format(f) for f in files]
863 880
864 881 # Cleanup any merge leftovers
865 882 self._remote.invalidate_vcs_cache()
866 883 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
867 884
868 885 if unresolved:
869 886 raise UnresolvedFilesInRepo(unresolved)
870 887 else:
871 888 raise
872 889
873 890 def _local_push(
874 891 self, source_branch, repository_path, target_branch,
875 892 enable_hooks=False, rc_scm_data=None):
876 893 """
877 894 Push the source_branch to the given repository and target_branch.
878 895
879 896 Currently it if the target_branch is not master and the target repo is
880 897 empty, the push will work, but then GitRepository won't be able to find
881 898 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
882 899 pointing to master, which does not exist).
883 900
884 901 It does not run the hooks in the target repo.
885 902 """
886 903 # TODO(skreft): deal with the case in which the target repo is empty,
887 904 # and the target_branch is not master.
888 905 target_repo = GitRepository(repository_path)
889 906 if (not target_repo.bare and
890 907 target_repo._current_branch() == target_branch):
891 908 # Git prevents pushing to the checked out branch, so simulate it by
892 909 # pulling into the target repository.
893 910 target_repo._local_pull(self.path, source_branch)
894 911 else:
895 912 cmd = ['push', os.path.abspath(repository_path),
896 913 '%s:%s' % (source_branch, target_branch)]
897 914 gitenv = {}
898 915 if rc_scm_data:
899 916 gitenv.update({'RC_SCM_DATA': rc_scm_data})
900 917
901 918 if not enable_hooks:
902 919 gitenv['RC_SKIP_HOOKS'] = '1'
903 920 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
904 921
905 922 def _get_new_pr_branch(self, source_branch, target_branch):
906 923 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
907 924 pr_branches = []
908 925 for branch in self.branches:
909 926 if branch.startswith(prefix):
910 927 pr_branches.append(int(branch[len(prefix):]))
911 928
912 929 if not pr_branches:
913 930 branch_id = 0
914 931 else:
915 932 branch_id = max(pr_branches) + 1
916 933
917 934 return '%s%d' % (prefix, branch_id)
918 935
919 936 def _maybe_prepare_merge_workspace(
920 937 self, repo_id, workspace_id, target_ref, source_ref):
921 938 shadow_repository_path = self._get_shadow_repository_path(
922 939 self.path, repo_id, workspace_id)
923 940 if not os.path.exists(shadow_repository_path):
924 941 self._local_clone(
925 942 shadow_repository_path, target_ref.name, source_ref.name)
926 943 log.debug('Prepared %s shadow repository in %s',
927 944 self.alias, shadow_repository_path)
928 945
929 946 return shadow_repository_path
930 947
931 948 def _merge_repo(self, repo_id, workspace_id, target_ref,
932 949 source_repo, source_ref, merge_message,
933 950 merger_name, merger_email, dry_run=False,
934 951 use_rebase=False, close_branch=False):
935 952
936 953 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
937 954 'rebase' if use_rebase else 'merge', dry_run)
938 955 if target_ref.commit_id != self.branches[target_ref.name]:
939 956 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
940 957 target_ref.commit_id, self.branches[target_ref.name])
941 958 return MergeResponse(
942 959 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
943 960 metadata={'target_ref': target_ref})
944 961
945 962 shadow_repository_path = self._maybe_prepare_merge_workspace(
946 963 repo_id, workspace_id, target_ref, source_ref)
947 964 shadow_repo = self.get_shadow_instance(shadow_repository_path)
948 965
949 966 # checkout source, if it's different. Otherwise we could not
950 967 # fetch proper commits for merge testing
951 968 if source_ref.name != target_ref.name:
952 969 if shadow_repo.get_remote_ref(source_ref.name):
953 970 shadow_repo._checkout(source_ref.name, force=True)
954 971
955 972 # checkout target, and fetch changes
956 973 shadow_repo._checkout(target_ref.name, force=True)
957 974
958 975 # fetch/reset pull the target, in case it is changed
959 976 # this handles even force changes
960 977 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
961 978 shadow_repo._local_reset(target_ref.name)
962 979
963 980 # Need to reload repo to invalidate the cache, or otherwise we cannot
964 981 # retrieve the last target commit.
965 982 shadow_repo = self.get_shadow_instance(shadow_repository_path)
966 983 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
967 984 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
968 985 target_ref, target_ref.commit_id,
969 986 shadow_repo.branches[target_ref.name])
970 987 return MergeResponse(
971 988 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
972 989 metadata={'target_ref': target_ref})
973 990
974 991 # calculate new branch
975 992 pr_branch = shadow_repo._get_new_pr_branch(
976 993 source_ref.name, target_ref.name)
977 994 log.debug('using pull-request merge branch: `%s`', pr_branch)
978 995 # checkout to temp branch, and fetch changes
979 996 shadow_repo._checkout(pr_branch, create=True)
980 997 try:
981 998 shadow_repo._local_fetch(source_repo.path, source_ref.name)
982 999 except RepositoryError:
983 1000 log.exception('Failure when doing local fetch on '
984 1001 'shadow repo: %s', shadow_repo)
985 1002 return MergeResponse(
986 1003 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
987 1004 metadata={'source_ref': source_ref})
988 1005
989 1006 merge_ref = None
990 1007 merge_failure_reason = MergeFailureReason.NONE
991 1008 metadata = {}
992 1009 try:
993 1010 shadow_repo._local_merge(merge_message, merger_name, merger_email,
994 1011 [source_ref.commit_id])
995 1012 merge_possible = True
996 1013
997 1014 # Need to invalidate the cache, or otherwise we
998 1015 # cannot retrieve the merge commit.
999 1016 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1000 1017 merge_commit_id = shadow_repo.branches[pr_branch]
1001 1018
1002 1019 # Set a reference pointing to the merge commit. This reference may
1003 1020 # be used to easily identify the last successful merge commit in
1004 1021 # the shadow repository.
1005 1022 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1006 1023 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1007 1024 except RepositoryError as e:
1008 1025 log.exception('Failure when doing local merge on git shadow repo')
1009 1026 if isinstance(e, UnresolvedFilesInRepo):
1010 1027 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1011 1028
1012 1029 merge_possible = False
1013 1030 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1014 1031
1015 1032 if merge_possible and not dry_run:
1016 1033 try:
1017 1034 shadow_repo._local_push(
1018 1035 pr_branch, self.path, target_ref.name, enable_hooks=True,
1019 1036 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1020 1037 merge_succeeded = True
1021 1038 except RepositoryError:
1022 1039 log.exception(
1023 1040 'Failure when doing local push from the shadow '
1024 1041 'repository to the target repository at %s.', self.path)
1025 1042 merge_succeeded = False
1026 1043 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1027 1044 metadata['target'] = 'git shadow repo'
1028 1045 metadata['merge_commit'] = pr_branch
1029 1046 else:
1030 1047 merge_succeeded = False
1031 1048
1032 1049 return MergeResponse(
1033 1050 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1034 1051 metadata=metadata)
@@ -1,1012 +1,1012 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, exceptions
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 46 from rhodecode.lib.vcs.compat import configparser
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62 """
63 63 Raises RepositoryError if repository could not be find at the given
64 64 ``repo_path``.
65 65
66 66 :param repo_path: local path of the repository
67 67 :param config: config object containing the repo configuration
68 68 :param create=False: if set to True, would try to create repository if
69 69 it does not exist rather than raising exception
70 70 :param src_url=None: would try to clone repository from given location
71 71 :param do_workspace_checkout=False: sets update of working copy after
72 72 making a clone
73 73 :param bare: not used, compatible with other VCS
74 74 """
75 75
76 76 self.path = safe_str(os.path.abspath(repo_path))
77 77 # mercurial since 4.4.X requires certain configuration to be present
78 78 # because sometimes we init the repos with config we need to meet
79 79 # special requirements
80 80 self.config = config if config else self.get_default_config(
81 81 default=[('extensions', 'largefiles', '1')])
82 82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83 83
84 84 self._init_repo(create, src_url, do_workspace_checkout)
85 85
86 86 # caches
87 87 self._commit_ids = {}
88 88
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = dict((commit_id, index)
106 106 for index, commit_id in enumerate(commit_ids))
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository
126 126 Returns only not closed active branches by default
127 127
128 128 :param active: return also active branches
129 129 :param closed: return also closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
162 162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 163 """
164 164 Creates and returns a tag for the given ``commit_id``.
165 165
166 166 :param name: name for new tag
167 167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 168 :param commit_id: commit id for which new tag would be created
169 169 :param message: message of the tag's commit
170 170 :param date: date of tag's commit
171 171
172 172 :raises TagAlreadyExistError: if tag with same name already exists
173 173 """
174 174 if name in self.tags:
175 175 raise TagAlreadyExistError("Tag %s already exists" % name)
176 176
177 177 commit = self.get_commit(commit_id=commit_id)
178 178 local = kwargs.setdefault('local', False)
179 179
180 180 if message is None:
181 181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182 182
183 183 date, tz = date_to_timestamp_plus_offset(date)
184 184
185 185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 186 self._remote.invalidate_vcs_cache()
187 187
188 188 # Reinitialize tags
189 189 self._invalidate_prop_cache('tags')
190 190 tag_id = self.tags[name]
191 191
192 192 return self.get_commit(commit_id=tag_id)
193 193
194 194 def remove_tag(self, name, user, message=None, date=None):
195 195 """
196 196 Removes tag with the given `name`.
197 197
198 198 :param name: name of the tag to be removed
199 199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 200 :param message: message of the tag's removal commit
201 201 :param date: date of tag's removal commit
202 202
203 203 :raises TagDoesNotExistError: if tag with given name does not exists
204 204 """
205 205 if name not in self.tags:
206 206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207 207
208 208 if message is None:
209 209 message = "Removed tag %s" % name
210 210 local = False
211 211
212 212 date, tz = date_to_timestamp_plus_offset(date)
213 213
214 214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 215 self._remote.invalidate_vcs_cache()
216 216 self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (safe_unicode(n), hexlify(h)) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
238 238 def _get_all_commit_ids(self):
239 239 return self._remote.get_all_commit_ids('visible')
240 240
241 241 def get_diff(
242 242 self, commit1, commit2, path='', ignore_whitespace=False,
243 243 context=3, path1=None):
244 244 """
245 245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 246 `commit2` since `commit1`.
247 247
248 248 :param commit1: Entry point from which diff is shown. Can be
249 249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 250 the changes since empty state of the repository until `commit2`
251 251 :param commit2: Until which commit changes should be shown.
252 252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 253 changes. Defaults to ``False``.
254 254 :param context: How many lines before/after changed lines should be
255 255 shown. Defaults to ``3``.
256 256 """
257 257 self._validate_diff_commits(commit1, commit2)
258 258 if path1 is not None and path1 != path:
259 259 raise ValueError("Diff of two different paths not supported.")
260 260
261 261 if path:
262 262 file_filter = [self.path, path]
263 263 else:
264 264 file_filter = None
265 265
266 266 diff = self._remote.diff(
267 267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 268 opt_git=True, opt_ignorews=ignore_whitespace,
269 269 context=context)
270 270 return MercurialDiff(diff)
271 271
272 272 def strip(self, commit_id, branch=None):
273 273 self._remote.strip(commit_id, update=False, backup="none")
274 274
275 275 self._remote.invalidate_vcs_cache()
276 276 # clear cache
277 277 self._invalidate_prop_cache('commit_ids')
278 278
279 279 return len(self.commit_ids)
280 280
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def hg_rebuild_fn_cache(self):
294 294 update_cache = self._remote.hg_rebuild_fn_cache()
295 295
296 296 self._remote.invalidate_vcs_cache()
297 297 return update_cache
298 298
299 299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 301 self, commit_id1, repo2, commit_id2)
302 302
303 303 if commit_id1 == commit_id2:
304 304 return commit_id1
305 305
306 306 ancestors = self._remote.revs_from_revspec(
307 307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 308 other_path=repo2.path)
309 309
310 310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311 311
312 312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 313 return ancestor_id
314 314
315 315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 316 if commit_id1 == commit_id2:
317 317 commits = []
318 318 else:
319 319 if merge:
320 320 indexes = self._remote.revs_from_revspec(
321 321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 323 else:
324 324 indexes = self._remote.revs_from_revspec(
325 325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 326 commit_id1, other_path=repo2.path)
327 327
328 328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 329 for idx in indexes]
330 330
331 331 return commits
332 332
333 333 @staticmethod
334 334 def check_url(url, config):
335 335 """
336 336 Function will check given url and try to verify if it's a valid
337 337 link. Sometimes it may happened that mercurial will issue basic
338 338 auth request that can cause whole API to hang when used from python
339 339 or other external calls.
340 340
341 341 On failures it'll raise urllib2.HTTPError, exception is also thrown
342 342 when the return code is non 200
343 343 """
344 344 # check first if it's not an local url
345 345 if os.path.isdir(url) or url.startswith('file:'):
346 346 return True
347 347
348 348 # Request the _remote to verify the url
349 349 return connection.Hg.check_url(url, config.serialize())
350 350
351 351 @staticmethod
352 352 def is_valid_repository(path):
353 353 return os.path.isdir(os.path.join(path, '.hg'))
354 354
355 355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 356 """
357 357 Function will check for mercurial repository in given path. If there
358 358 is no repository in that path it will raise an exception unless
359 359 `create` parameter is set to True - in that case repository would
360 360 be created.
361 361
362 362 If `src_url` is given, would try to clone repository from the
363 363 location at given clone_point. Additionally it'll make update to
364 364 working copy accordingly to `do_workspace_checkout` flag.
365 365 """
366 366 if create and os.path.exists(self.path):
367 367 raise RepositoryError(
368 368 "Cannot create repository at %s, location already exist"
369 369 % self.path)
370 370
371 371 if src_url:
372 372 url = str(self._get_url(src_url))
373 373 MercurialRepository.check_url(url, self.config)
374 374
375 375 self._remote.clone(url, self.path, do_workspace_checkout)
376 376
377 377 # Don't try to create if we've already cloned repo
378 378 create = False
379 379
380 380 if create:
381 381 os.makedirs(self.path, mode=0o755)
382 382 self._remote.localrepository(create)
383 383
384 384 @LazyProperty
385 385 def in_memory_commit(self):
386 386 return MercurialInMemoryCommit(self)
387 387
388 388 @LazyProperty
389 389 def description(self):
390 390 description = self._remote.get_config_value(
391 391 'web', 'description', untrusted=True)
392 392 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
393 393
394 394 @LazyProperty
395 395 def contact(self):
396 396 contact = (
397 397 self._remote.get_config_value("web", "contact") or
398 398 self._remote.get_config_value("ui", "username"))
399 399 return safe_unicode(contact or self.DEFAULT_CONTACT)
400 400
401 401 @LazyProperty
402 402 def last_change(self):
403 403 """
404 404 Returns last change made on this repository as
405 405 `datetime.datetime` object.
406 406 """
407 407 try:
408 408 return self.get_commit().date
409 409 except RepositoryError:
410 410 tzoffset = makedate()[1]
411 411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412 412
413 413 def _get_fs_mtime(self):
414 414 # fallback to filesystem
415 415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 416 st_path = os.path.join(self.path, '.hg', "store")
417 417 if os.path.exists(cl_path):
418 418 return os.stat(cl_path).st_mtime
419 419 else:
420 420 return os.stat(st_path).st_mtime
421 421
422 422 def _get_url(self, url):
423 423 """
424 424 Returns normalized url. If schema is not given, would fall
425 425 to filesystem
426 426 (``file:///``) schema.
427 427 """
428 428 url = url.encode('utf8')
429 429 if url != 'default' and '://' not in url:
430 430 url = "file:" + urllib.pathname2url(url)
431 431 return url
432 432
433 433 def get_hook_location(self):
434 434 """
435 435 returns absolute path to location where hooks are stored
436 436 """
437 437 return os.path.join(self.path, '.hg', '.hgrc')
438 438
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :param commit_id: commit hash or symbolic name resolvable by the
            remote ``lookup`` call; mutually exclusive with ``commit_idx``.
        :param commit_idx: numeric index into ``commit_ids`` (may be negative).
        :param pre_load: optional list of commit attributes to pre-load.
        :param translate_tag: not used in this backend; kept for interface
            compatibility with the other backends.
        :param maybe_unreachable: not used in this backend; interface compat.
        :param reference_obj: not used in this backend; interface compat.
        :raises EmptyRepositoryError: when the repository has no commits.
        :raises CommitDoesNotExistError: when the commit cannot be resolved.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # not cached: fall through to the remote lookup below
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize a negative index to its positive position
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                # unknown index: let the remote try to resolve the raw value
                commit_id = commit_idx
        else:
            # neither id nor index given: default to the repository tip
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            # resolve the full hash and the numeric index in a single call
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
481 481
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :param translate_tags: not used in this backend; interface compat.
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # NOTE(review): branch_ancestors is never set to True in this method,
        # so the ancestors() revset branch below is currently dead code --
        # confirm whether it can be removed.
        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the end position inclusive for the slice below
            end_pos += 1

        # build a mercurial revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            # exclude commits hidden or obsoleted by mercurial evolve
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # a revset query returns numeric revision indexes, which need the
            # index-based generator to resolve them into commit objects
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
564 564
565 565 def pull(self, url, commit_ids=None):
566 566 """
567 567 Pull changes from external location.
568 568
569 569 :param commit_ids: Optional. Can be set to a list of commit ids
570 570 which shall be pulled from the other repository.
571 571 """
572 572 url = self._get_url(url)
573 573 self._remote.pull(url, commit_ids=commit_ids)
574 574 self._remote.invalidate_vcs_cache()
575 575
    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with GIT fetch==pull

        For Mercurial a fetch simply delegates to :meth:`pull`.

        :param url: location to pull from.
        :param commit_ids: optional list of commit ids restricting the pull.
        """
        return self.pull(url, commit_ids=commit_ids)
581 581
582 582 def push(self, url):
583 583 url = self._get_url(url)
584 584 self._remote.sync_push(url)
585 585
586 586 def _local_clone(self, clone_path):
587 587 """
588 588 Create a local clone of the current repo.
589 589 """
590 590 self._remote.clone(self.path, clone_path, update_after_clone=True,
591 591 hooks=False)
592 592
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param revision: revision/commit id or symbolic name to check out.
        :param clean: when True, discard uncommitted local changes while
            updating.
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
599 599
600 600 def _identify(self):
601 601 """
602 602 Return the current state of the working directory.
603 603 """
604 604 return self._remote.identify().strip().rstrip('+')
605 605
606 606 def _heads(self, branch=None):
607 607 """
608 608 Return the commit ids of the repository heads.
609 609 """
610 610 return self._remote.heads(branch=branch).strip().split(' ')
611 611
612 612 def _ancestor(self, revision1, revision2):
613 613 """
614 614 Return the common ancestor of the two revisions.
615 615 """
616 616 return self._remote.ancestor(revision1, revision2)
617 617
618 618 def _local_push(
619 619 self, revision, repository_path, push_branches=False,
620 620 enable_hooks=False):
621 621 """
622 622 Push the given revision to the specified repository.
623 623
624 624 :param push_branches: allow to create branches in the target repo.
625 625 """
626 626 self._remote.push(
627 627 [revision], repository_path, hooks=enable_hooks,
628 628 push_branches=push_branches)
629 629
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: reference the working dir is updated to first.
        :param merge_message: commit message used for a plain merge commit.
        :param user_name: author name for the merge commit.
        :param user_email: author email for the merge commit.
        :param source_ref: reference merged into ``target_ref``.
        :param use_rebase: rebase source on top of target instead of
            creating a merge commit.
        :param close_commit_id: optional id of a branch-close commit that
            supersedes ``source_ref`` as the effective merge source.
        :param dry_run: accepted for interface compatibility; not inspected
            inside this method.
        :raises UnresolvedFilesInRepo: when the merge/rebase leaves
            unresolved conflicts behind.
        """
        source_ref_commit_id = source_ref.commit_id
        target_ref_commit_id = target_ref.commit_id

        # update our workdir to target ref, for proper merge
        self._update(target_ref_commit_id, clean=True)

        ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # In this case we should force a commit message
            # (fast-forward: the source already contains the target)
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # temporary bookmark so we can check out the rebased head after
                bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
710 710
711 711 def _local_close(self, target_ref, user_name, user_email,
712 712 source_ref, close_message=''):
713 713 """
714 714 Close the branch of the given source_revision
715 715
716 716 Returns the commit id of the close and a boolean indicating if the
717 717 commit needs to be pushed.
718 718 """
719 719 self._update(source_ref.commit_id)
720 720 message = close_message or "Closing branch: `{}`".format(source_ref.name)
721 721 try:
722 722 self._remote.commit(
723 723 message=safe_str(message),
724 724 username=safe_str('%s <%s>' % (user_name, user_email)),
725 725 close_branch=True)
726 726 self._remote.invalidate_vcs_cache()
727 727 return self._identify(), True
728 728 except RepositoryError:
729 729 # Cleanup any commit leftovers
730 730 self._remote.update(clean=True)
731 731 raise
732 732
733 733 def _is_the_same_branch(self, target_ref, source_ref):
734 734 return (
735 735 self._get_branch_name(target_ref) ==
736 736 self._get_branch_name(source_ref))
737 737
738 738 def _get_branch_name(self, ref):
739 739 if ref.type == 'branch':
740 740 return ref.name
741 741 return self._remote.ctx_branch(ref.commit_id)
742 742
743 743 def _maybe_prepare_merge_workspace(
744 744 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
745 745 shadow_repository_path = self._get_shadow_repository_path(
746 746 self.path, repo_id, workspace_id)
747 747 if not os.path.exists(shadow_repository_path):
748 748 self._local_clone(shadow_repository_path)
749 749 log.debug(
750 750 'Prepared shadow repository in %s', shadow_repository_path)
751 751
752 752 return shadow_repository_path
753 753
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` of ``source_repo`` into ``target_ref`` of this
        repository, using a shadow repository as the scratch area.

        Returns a ``MergeResponse`` carrying: whether a merge is possible,
        whether it succeeded, the resulting reference (a ``pr-merge``
        bookmark) and a failure reason plus metadata for error reporting.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # the target must currently be a repository head, otherwise merging
        # onto it is refused
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                # multiple heads on the target branch: refuse and report a
                # truncated head list via metadata
                heads_all = self._heads(target_ref.name)
                max_heads = 10
                if len(heads_all) > max_heads:
                    # NOTE(review): the separator is '\n,' (newline then
                    # comma); possibly ',\n' was intended -- confirm before
                    # changing, the string ends up in user-facing metadata.
                    heads = '\n,'.join(
                        heads_all[:max_heads] +
                        ['and {} more.'.format(len(heads_all)-max_heads)])
                else:
                    heads = '\n,'.join(heads_all)
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        # prepare (or reuse) the shadow repository and pull both sides in
        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:

            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase,
                    close_commit_id=close_commit_id, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                if isinstance(e, UnresolvedFilesInRepo):
                    # surface a truncated conflict list in the metadata
                    all_conflicts = list(e.args[0])
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                                    + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
922 922
923 923 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
924 924 config = self.config.copy()
925 925 if not enable_hooks:
926 926 config.clear_section('hooks')
927 927 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
928 928
929 929 def _validate_pull_reference(self, reference):
930 930 if not (reference.name in self.bookmarks or
931 931 reference.name in self.branches or
932 932 self.get_commit(reference.commit_id)):
933 933 raise CommitDoesNotExistError(
934 934 'Unknown branch, bookmark or commit id')
935 935
936 936 def _local_pull(self, repository_path, reference):
937 937 """
938 938 Fetch a branch, bookmark or commit from a local repository.
939 939 """
940 940 repository_path = os.path.abspath(repository_path)
941 941 if repository_path == self.path:
942 942 raise ValueError('Cannot pull from the same repository')
943 943
944 944 reference_type_to_option_name = {
945 945 'book': 'bookmark',
946 946 'branch': 'branch',
947 947 }
948 948 option_name = reference_type_to_option_name.get(
949 949 reference.type, 'revision')
950 950
951 951 if option_name == 'revision':
952 952 ref = reference.commit_id
953 953 else:
954 954 ref = reference.name
955 955
956 956 options = {option_name: [ref]}
957 957 self._remote.pull_cmd(repository_path, hooks=False, **options)
958 958 self._remote.invalidate_vcs_cache()
959 959
960 960 def bookmark(self, bookmark, revision=None):
961 961 if isinstance(bookmark, unicode):
962 962 bookmark = safe_str(bookmark)
963 963 self._remote.bookmark(bookmark, revision=revision)
964 964 self._remote.invalidate_vcs_cache()
965 965
    def get_path_permissions(self, username):
        """
        Compute narrow-ACL path permissions for ``username`` from the
        repository-local ``.hg/hgacl`` file.

        Returns a checker built via
        ``BasePathPermissionChecker.create_from_patterns`` from the include
        and exclude patterns, or ``None`` when no ACL file exists or no
        matching configuration is found.

        :raises exceptions.RepositoryRequirementError: when the ACL file
            exists but cannot be read or parsed.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # NOTE: `hgacl` is the parser instance bound later in the
            # enclosing scope; this closure is only invoked after it is set.
            # User-specific options take precedence over the 'default' ones,
            # and the 'narrowacl' section over 'narrowhgacl'.
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)
                    ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                if '*' not in pattern and '?' not in pattern:
                    # plain directory pattern: also match everything below it
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                # any parsing/IO problem is surfaced as a requirement error
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
1006 1006
1007 1007
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """
    Collection generator whose items are numeric revision indexes (as
    produced by revset queries) rather than commit hashes.
    """

    def _commit_factory(self, commit_id):
        # `commit_id` is really a revision index here, hence commit_idx
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,370 +1,370 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SVN repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import date_astimestamp
33 33 from rhodecode.lib.utils import safe_str, safe_unicode
34 34 from rhodecode.lib.utils2 import CachedProperty
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.backends.svn.commit import (
38 38 SubversionCommit, _date_from_svn_properties)
39 39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
40 40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
41 41 from rhodecode.lib.vcs.conf import settings
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
44 44 VCSError, NodeDoesNotExistError)
45 45
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
class SubversionRepository(base.BaseRepository):
    """
    Subversion backend implementation

    .. important::

        It is very important to distinguish the commit index and the commit id
        which is assigned by Subversion. The first one is always handled as an
        `int` by this implementation. The commit id assigned by Subversion on
        the other side will always be a `str`.

        There is a specific trap since the first commit will have the index
        ``0`` but the svn id will be ``"1"``.

    """

    # Note: Subversion does not really have a default branch name.
    DEFAULT_BRANCH_NAME = None

    contact = base.BaseRepository.DEFAULT_CONTACT
    description = base.BaseRepository.DEFAULT_DESCRIPTION

    def __init__(self, repo_path, config=None, create=False, src_url=None, with_wire=None,
                 bare=False, **kwargs):
        """
        :param repo_path: filesystem path of the repository.
        :param config: optional config object; falls back to the default.
        :param create: create a new repository at ``repo_path``.
        :param src_url: when creating, optionally import from this URL.
        :param with_wire: remote wire options; defaults to no caching.
        :param bare: accepted for interface compatibility with the other
            backends; not used by subversion.
        """
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url)

        # caches
        self._commit_ids = {}

    @LazyProperty
    def _remote(self):
        # lazily created connection to the remote subversion endpoint
        repo_id = self.path
        return connection.Svn(self.path, repo_id, self.config, with_wire=self.with_wire)

    def _init_repo(self, create, src_url):
        # Create a fresh repository (optionally importing from ``src_url``)
        # or validate that an existing one lives at ``self.path``.
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if create:
            self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
            if src_url:
                src_url = _sanitize_url(src_url)
                self._remote.import_remote_repository(src_url)
        else:
            self._check_path()

    @CachedProperty
    def commit_ids(self):
        # svn revision ids are sequential numbers starting at 1, while our
        # commit indexes start at 0.
        # NOTE(review): `xrange` ties this to python2.
        head = self._remote.lookup(None)
        return [str(r) for r in xrange(1, head + 1)]

    def _rebuild_cache(self, commit_ids):
        # no-op: subversion commit ids are derived sequentially, there is
        # nothing to rebuild
        pass

    def run_svn_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as svn command and returns tuple
        (stdout, stderr).

        :param cmd: full svn command to be executed
        :param opts: env options to pass into Subprocess command
        """
        if not isinstance(cmd, list):
            raise ValueError('cmd must be a list, got %s instead' % type(cmd))

        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_svn_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
        return out, err

    @LazyProperty
    def branches(self):
        # branches are emulated via configured path patterns
        return self._tags_or_branches('vcs_svn_branch')

    @LazyProperty
    def branches_closed(self):
        # subversion has no concept of closed branches
        return {}

    @LazyProperty
    def bookmarks(self):
        # subversion has no bookmarks
        return {}

    @LazyProperty
    def branches_all(self):
        # TODO: johbo: Implement proper branch support
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    @LazyProperty
    def tags(self):
        # tags are emulated via configured path patterns
        return self._tags_or_branches('vcs_svn_tag')

    def _tags_or_branches(self, config_section):
        """
        Collect directories matching the path patterns configured in
        ``config_section`` and map each one to the current head commit id.
        """
        found_items = {}

        if self.is_empty():
            return {}

        for pattern in self._patterns_from_section(config_section):
            pattern = vcspath.sanitize(pattern)
            tip = self.get_commit()
            try:
                if pattern.endswith('*'):
                    basedir = tip.get_node(vcspath.dirname(pattern))
                    directories = basedir.dirs
                else:
                    directories = (tip.get_node(pattern), )
            except NodeDoesNotExistError:
                # pattern does not match anything in the tree: skip it
                continue
            # every matched directory points at the latest commit id
            found_items.update(
                (safe_unicode(n.path),
                 self.commit_ids[-1])
                for n in directories)

        def get_name(item):
            return item[0]

        return OrderedDict(sorted(found_items.items(), key=get_name))

    def _patterns_from_section(self, section):
        # yield just the pattern values of the given config section
        return (pattern for key, pattern in self.config.items(section))

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        # svn history is linear, so the common ancestor is simply the
        # older of the two revisions
        if self != repo2:
            raise ValueError(
                "Subversion does not support getting common ancestor of"
                " different repositories.")

        if int(commit_id1) < int(commit_id2):
            return commit_id1
        return commit_id2

    def verify(self):
        """Run a repository verification via the remote and return its output."""
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        # TODO: johbo: Implement better comparison, this is a very naive
        # version which does not allow to compare branches, tags or folders
        # at all.
        if repo2 != self:
            raise ValueError(
                "Subversion does not support comparison of of different "
                "repositories.")

        if commit_id1 == commit_id2:
            return []

        commit_idx1 = self._get_commit_idx(commit_id1)
        commit_idx2 = self._get_commit_idx(commit_id2)

        # all commits strictly after commit_id1, up to and including commit_id2
        commits = [
            self.get_commit(commit_idx=idx)
            for idx in range(commit_idx1 + 1, commit_idx2 + 1)]

        return commits

    def _get_commit_idx(self, commit_id):
        """Translate a commit id into its 0-based commit index."""
        try:
            svn_rev = int(commit_id)
        except:
            # NOTE(review): bare except maps any failure to a remote lookup;
            # consider narrowing this to ValueError.
            # TODO: johbo: this might be only one case, HEAD, check this
            svn_rev = self._remote.lookup(commit_id)
        commit_idx = svn_rev - 1
        if commit_idx >= len(self.commit_ids):
            raise CommitDoesNotExistError(
                "Commit at index %s does not exist." % (commit_idx, ))
        return commit_idx

    @staticmethod
    def check_url(url, config):
        """
        Check if `url` is a valid source to import a Subversion repository.
        """
        # convert to URL if it's a local directory
        if os.path.isdir(url):
            url = 'file://' + urllib.pathname2url(url)
        return connection.Svn.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        # cheap probe: instantiating the backend validates the path
        try:
            SubversionRepository(path)
            return True
        except VCSError:
            pass
        return False

    def _check_path(self):
        # validate that self.path exists and holds a subversion repository
        if not os.path.exists(self.path):
            raise VCSError('Path "%s" does not exist!' % (self.path, ))
        if not self._remote.is_path_valid_repository(self.path):
            raise VCSError(
                'Path "%s" does not contain a Subversion repository' %
                (self.path, ))

    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        # Subversion always has a first commit which has id "0" and contains
        # what we are looking for.
        last_id = len(self.commit_ids)
        properties = self._remote.revision_properties(last_id)
        return _date_from_svn_properties(properties)

    @LazyProperty
    def in_memory_commit(self):
        # scratch commit object used by the in-memory editing API
        return SubversionInMemoryCommit(self)

    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        return os.path.join(self.path, 'hooks')

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Return the ``SubversionCommit`` for ``commit_id`` or ``commit_idx``.

        ``pre_load``, ``translate_tag``, ``maybe_unreachable`` and
        ``reference_obj`` are accepted for interface compatibility with the
        other backends and are not used here.

        :raises EmptyRepositoryError: when the repository has no commits.
        :raises CommitDoesNotExistError: when the commit cannot be resolved.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                commit_id = self.commit_ids[commit_idx]
            except IndexError:
                raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))

        commit_id = self._sanitize_commit_id(commit_id)
        commit = SubversionCommit(repository=self, commit_id=commit_id)
        return commit

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Return a ``CollectionGenerator`` of ``SubversionCommit`` objects
        between ``start_id`` and ``end_id`` (inclusive), optionally filtered
        by date range or restricted to a branch path.

        ``show_hidden`` and ``translate_tags`` are accepted for interface
        compatibility and are not used here.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commit_ids yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        start_raw_id = self._sanitize_commit_id(start_id)
        start_pos = self.commit_ids.index(start_raw_id) if start_id else None
        end_raw_id = self._sanitize_commit_id(end_id)
        end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))
        if end_pos is not None:
            # make the range inclusive of the end commit
            end_pos += 1

        # Date based filtering
        if start_date or end_date:
            start_raw_id, end_raw_id = self._remote.lookup_interval(
                date_astimestamp(start_date) if start_date else None,
                date_astimestamp(end_date) if end_date else None)
            start_pos = start_raw_id - 1
            end_pos = end_raw_id

        commit_ids = self.commit_ids

        # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
        if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
            # NOTE(review): `long` ties this branch to python2
            svn_rev = long(self.commit_ids[-1])
            commit_ids = self._remote.node_history(
                path=branch_name, revision=svn_rev, limit=None)
            commit_ids = [str(i) for i in reversed(commit_ids)]

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos:end_pos]
        return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)

    def _sanitize_commit_id(self, commit_id):
        """
        Normalize ``commit_id`` into a plain svn revision number string;
        symbolic names (``None``/``HEAD``/``tip``/default branch) resolve to
        the head revision.

        :raises CommitDoesNotExistError: for out-of-range numbers or
            unrecognized symbolic names.
        """
        if commit_id and commit_id.isdigit():
            if int(commit_id) <= len(self.commit_ids):
                return commit_id
            else:
                raise CommitDoesNotExistError(
                    "Commit %s does not exist." % (commit_id, ))
        if commit_id not in [
                None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
            raise CommitDoesNotExistError(
                "Commit id %s not understood." % (commit_id, ))
        svn_rev = self._remote.lookup('HEAD')
        return str(svn_rev)

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Return a ``SubversionDiff`` between ``commit1`` and ``commit2``,
        optionally restricted to ``path`` (and ``path1`` on the old side).
        """
        self._validate_diff_commits(commit1, commit2)
        # NOTE(review): `long` ties this to python2
        svn_rev1 = long(commit1.raw_id)
        svn_rev2 = long(commit2.raw_id)
        diff = self._remote.diff(
            svn_rev1, svn_rev2, path1=path1, path2=path,
            ignore_whitespace=ignore_whitespace, context=context)
        return SubversionDiff(diff)
365 365
366 366
367 367 def _sanitize_url(url):
368 368 if '://' not in url:
369 369 url = 'file://' + urllib.pathname2url(url)
370 370 return url
@@ -1,875 +1,876 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 23 """
24 24
25 25 import os
26 26 import stat
27 27
28 28 from zope.cachedescriptors.property import Lazy as LazyProperty
29 29
30 30 import rhodecode
31 31 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
32 32 from rhodecode.lib.utils import safe_unicode, safe_str
33 33 from rhodecode.lib.utils2 import md5
34 34 from rhodecode.lib.vcs import path as vcspath
35 35 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
36 36 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
37 37 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
38 38
# Store prefix used by the Mercurial largefiles extension; paths under this
# prefix identify largefile entries rather than regular repository files.
LARGEFILE_PREFIX = '.hglf'
40 40
41 41
class NodeKind:
    # Enumeration of the node types recognized by the vcs layer.
    SUBMODULE = -1
    DIR = 1
    FILE = 2
    LARGEFILE = 3
47 47
48 48
class NodeState:
    # Possible states of a node relative to its commit (see FileNode.state).
    ADDED = u'added'
    CHANGED = u'changed'
    NOT_CHANGED = u'not changed'
    REMOVED = u'removed'
54 54
55 55
class NodeGeneratorBase(object):
    """
    Lazy base collection for added/changed/removed filenodes.

    Nodes are materialized only on iteration, slicing or call; ``len()``
    is answered from the raw path list without creating any filenodes.
    """

    def __init__(self, current_paths, cs):
        self.current_paths = current_paths
        self.cs = cs

    def __call__(self):
        # materialize the whole collection as a list
        return list(self)

    def __getslice__(self, i, j):
        # lazily yield nodes only for the requested path window
        return (self.cs.get_node(path) for path in self.current_paths[i:j])

    def __len__(self):
        return len(self.current_paths)

    def __iter__(self):
        return (self.cs.get_node(path) for path in self.current_paths)
81 81
82 82
class AddedFileNodesGenerator(NodeGeneratorBase):
    """
    Lazy collection of file nodes added in the current commit.
    """
87 87
88 88
class ChangedFileNodesGenerator(NodeGeneratorBase):
    """
    Lazy collection of file nodes changed in the current commit.
    """
93 93
94 94
class RemovedFileNodesGenerator(NodeGeneratorBase):
    """
    Lazy collection of files removed in the current commit; yields
    ``RemovedFileNode`` placeholders instead of fetching real nodes.
    """

    def __iter__(self):
        return (RemovedFileNode(path=path) for path in self.current_paths)

    def __getslice__(self, i, j):
        return (RemovedFileNode(path=path)
                for path in self.current_paths[i:j])
106 106
107 107
class Node(object):
    """
    Simplest class representing file or directory on repository. SCM backends
    should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
    directly.

    Node's ``path`` cannot start with slash as we operate on *relative* paths
    only. Moreover, every single node is identified by the ``path`` attribute,
    so it cannot end with slash, too. Otherwise, path could lead to mistakes.
    """
    # RTLO marker allows swapping text direction, and certain security
    # attacks could be used with this
    RTLO_MARKER = u"\u202E"
    commit = None

    def __init__(self, path, kind):
        self._validate_path(path)  # can throw exception if path is invalid
        self.path = safe_str(path.rstrip('/'))  # we store paths as str
        if path == '' and kind != NodeKind.DIR:
            raise NodeError("Only DirNode and its subclasses may be "
                            "initialized with empty path")
        self.kind = kind

        if self.is_root() and not self.is_dir():
            raise NodeError("Root node cannot be FILE kind")

    def _validate_path(self, path):
        # only relative paths are supported; leading slash breaks joining
        if path.startswith('/'):
            raise NodeError(
                "Cannot initialize Node objects with slash at "
                "the beginning as only relative paths are supported. "
                "Got %s" % (path,))

    @LazyProperty
    def parent(self):
        # Parent node, fetched through the commit when one is attached so it
        # keeps full repository context; ``None`` for the root node.
        parent_path = self.get_parent_path()
        if parent_path:
            if self.commit:
                return self.commit.get_node(parent_path)
            return DirNode(parent_path)
        return None

    @LazyProperty
    def unicode_path(self):
        return safe_unicode(self.path)

    @LazyProperty
    def has_rtlo(self):
        """Detects if a path has right-to-left-override marker"""
        return self.RTLO_MARKER in self.unicode_path

    @LazyProperty
    def unicode_path_safe(self):
        """
        Special SAFE representation of path without the right-to-left-override.
        This should be only used for "showing" the file, cannot be used for any
        urls etc.
        """
        return safe_unicode(self.path).replace(self.RTLO_MARKER, '')

    @LazyProperty
    def dir_path(self):
        """
        Returns name of the directory from full path of this vcs node. Empty
        string is returned if there's no directory in the path
        """
        _parts = self.path.rstrip('/').rsplit('/', 1)
        if len(_parts) == 2:
            return safe_unicode(_parts[0])
        return u''

    @LazyProperty
    def name(self):
        """
        Returns name of the node so if its path
        then only last part is returned.
        """
        return safe_unicode(self.path.rstrip('/').split('/')[-1])

    @property
    def kind(self):
        return self._kind

    @kind.setter
    def kind(self, kind):
        # kind is write-once; changing it later would desynchronize the
        # path/type invariants established at construction time
        if hasattr(self, '_kind'):
            raise NodeError("Cannot change node's kind")
        else:
            self._kind = kind
            # Post setter check (path's trailing slash)
            if self.path.endswith('/'):
                raise NodeError("Node's path cannot end with slash")

    def __cmp__(self, other):
        """
        Comparator using name of the node, needed for quick list sorting.
        """

        kind_cmp = cmp(self.kind, other.kind)
        if kind_cmp:
            if isinstance(self, SubModuleNode):
                # we make submodules equal to dirnode for "sorting" purposes
                return NodeKind.DIR
            return kind_cmp
        return cmp(self.name, other.name)

    def __eq__(self, other):
        for attr in ['name', 'path', 'kind']:
            if getattr(self, attr) != getattr(other, attr):
                return False
        if self.is_file():
            if self.content != other.content:
                return False
        else:
            # For DirNode's check without entering each dir
            self_nodes_paths = list(sorted(n.path for n in self.nodes))
            # BUG FIX: this previously iterated ``self.nodes`` again, so two
            # directories with equal name/path/kind always compared equal
            # regardless of their actual contents.
            other_nodes_paths = list(sorted(n.path for n in other.nodes))
            if self_nodes_paths != other_nodes_paths:
                return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.path)

    def __str__(self):
        return self.__repr__()

    def __unicode__(self):
        return self.name

    def get_parent_path(self):
        """
        Returns node's parent path or empty string if node is root.
        """
        if self.is_root():
            return ''
        return vcspath.dirname(self.path.rstrip('/')) + '/'

    def is_file(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
        otherwise.
        """
        return self.kind == NodeKind.FILE

    def is_dir(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
        otherwise.
        """
        return self.kind == NodeKind.DIR

    def is_root(self):
        """
        Returns ``True`` if node is a root node and ``False`` otherwise.
        """
        return self.kind == NodeKind.DIR and self.path == ''

    def is_submodule(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
        otherwise.
        """
        return self.kind == NodeKind.SUBMODULE

    def is_largefile(self):
        """
        Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
        otherwise
        """
        return self.kind == NodeKind.LARGEFILE

    def is_link(self):
        # without an attached commit we cannot know; default to False
        if self.commit:
            return self.commit.is_link(self.path)
        return False

    @LazyProperty
    def added(self):
        return self.state is NodeState.ADDED

    @LazyProperty
    def changed(self):
        return self.state is NodeState.CHANGED

    @LazyProperty
    def not_changed(self):
        return self.state is NodeState.NOT_CHANGED

    @LazyProperty
    def removed(self):
        return self.state is NodeState.REMOVED
302 302
303 303
class FileNode(Node):
    """
    Class representing file nodes.

    :attribute: path: path to the node, relative to repository's root
    :attribute: content: if given arbitrary sets content of the file
    :attribute: commit: if given, first time content is accessed, callback
    :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
    """
    # pre_load attribute names that subclasses may exclude from bulk loading
    _filter_pre_load = []

    def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
        """
        Only one of ``content`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param content: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        :param mode: ST_MODE (i.e. 0100644)
        """
        if content and commit:
            raise NodeError("Cannot use both content and commit")
        super(FileNode, self).__init__(path, kind=NodeKind.FILE)
        self.commit = commit
        self._content = content
        self._mode = mode or FILEMODE_DEFAULT

        self._set_bulk_properties(pre_load)

    def _set_bulk_properties(self, pre_load):
        # Eagerly resolve the requested lazy attributes and cache them in
        # __dict__ so later access does not trigger per-attribute fetches.
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        for attr_name in pre_load:
            result = getattr(self, attr_name)
            if callable(result):
                result = result()
            self.__dict__[attr_name] = result

    @LazyProperty
    def mode(self):
        """
        Returns lazily mode of the FileNode. If `commit` is not set, would
        use value given at initialization or `FILEMODE_DEFAULT` (default).
        """
        if self.commit:
            mode = self.commit.get_file_mode(self.path)
        else:
            mode = self._mode
        return mode

    @LazyProperty
    def raw_bytes(self):
        """
        Returns lazily the raw bytes of the FileNode.
        """
        if self.commit:
            # fetch once and memoize on the instance
            if self._content is None:
                self._content = self.commit.get_file_content(self.path)
            content = self._content
        else:
            content = self._content
        return content

    def stream_bytes(self):
        """
        Returns an iterator that will stream the content of the file directly from
        vcsserver without loading it to memory.
        """
        if self.commit:
            return self.commit.get_file_content_streamed(self.path)
        raise NodeError("Cannot retrieve stream_bytes without related commit attribute")

    @LazyProperty
    def md5(self):
        """
        Returns md5 of the file node.
        """
        return md5(self.raw_bytes)

    def metadata_uncached(self):
        """
        Returns md5, binary flag of the file node, without any cache usage.
        """

        content = self.content_uncached()

        # NUL byte heuristic; note is_binary may be ''/None for empty content
        is_binary = content and '\0' in content
        size = 0
        if content:
            size = len(content)

        return is_binary, md5(content), size, content

    def content_uncached(self):
        """
        Returns lazily content of the FileNode. If possible, would try to
        decode content from UTF-8.
        """
        if self.commit:
            content = self.commit.get_file_content(self.path)
        else:
            content = self._content
        return content

    @LazyProperty
    def content(self):
        """
        Returns lazily content of the FileNode. If possible, would try to
        decode content from UTF-8.
        """
        content = self.raw_bytes

        if self.is_binary:
            # binary content is returned as raw bytes, undecoded
            return content
        return safe_unicode(content)

    @LazyProperty
    def size(self):
        if self.commit:
            return self.commit.get_file_size(self.path)
        raise NodeError(
            "Cannot retrieve size of the file without related "
            "commit attribute")

    @LazyProperty
    def message(self):
        # message of the last commit touching this file
        if self.commit:
            return self.last_commit.message
        raise NodeError(
            "Cannot retrieve message of the file without related "
            "commit attribute")

    @LazyProperty
    def last_commit(self):
        if self.commit:
            pre_load = ["author", "date", "message", "parents"]
            return self.commit.get_path_commit(self.path, pre_load=pre_load)
        raise NodeError(
            "Cannot retrieve last commit of the file without "
            "related commit attribute")

    def get_mimetype(self):
        """
        Mimetype is calculated based on the file's content. If ``_mimetype``
        attribute is available, it will be returned (backends which store
        mimetypes or can easily recognize them, should set this private
        attribute to indicate that type should *NOT* be calculated).
        """

        if hasattr(self, '_mimetype'):
            if (isinstance(self._mimetype, (tuple, list,)) and
                    len(self._mimetype) == 2):
                return self._mimetype
            else:
                raise NodeError('given _mimetype attribute must be an 2 '
                                'element list or tuple')

        db = get_mimetypes_db()
        mtype, encoding = db.guess_type(self.name)

        if mtype is None:
            # largefiles are skipped from the binary check since their
            # content is not available here
            if not self.is_largefile() and self.is_binary:
                mtype = 'application/octet-stream'
                encoding = None
            else:
                mtype = 'text/plain'
                encoding = None

        # try with pygments
        try:
            from pygments.lexers import get_lexer_for_filename
            mt = get_lexer_for_filename(self.name).mimetypes
        except Exception:
            mt = None

        if mt:
            # pygments match wins over the mimetypes-db guess
            mtype = mt[0]

        return mtype, encoding

    @LazyProperty
    def mimetype(self):
        """
        Wrapper around full mimetype info. It returns only type of fetched
        mimetype without the encoding part. use get_mimetype function to fetch
        full set of (type,encoding)
        """
        return self.get_mimetype()[0]

    @LazyProperty
    def mimetype_main(self):
        # main part of the mimetype, e.g. 'text' for 'text/plain'
        return self.mimetype.split('/')[0]

    @classmethod
    def get_lexer(cls, filename, content=None):
        # Guess a pygments lexer: content/filename guess first, then our
        # extension map, finally a plain-text fallback.
        from pygments import lexers

        extension = filename.split('.')[-1]
        lexer = None

        try:
            lexer = lexers.guess_lexer_for_filename(
                filename, content, stripnl=False)
        except lexers.ClassNotFound:
            lexer = None

        # try our EXTENSION_MAP
        if not lexer:
            try:
                lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
                if lexer_class:
                    lexer = lexers.get_lexer_by_name(lexer_class[0])
            except lexers.ClassNotFound:
                lexer = None

        if not lexer:
            lexer = lexers.TextLexer(stripnl=False)

        return lexer

    @LazyProperty
    def lexer(self):
        """
        Returns pygment's lexer class. Would try to guess lexer taking file's
        content, name and mimetype.
        """
        return self.get_lexer(self.name, self.content)

    @LazyProperty
    def lexer_alias(self):
        """
        Returns first alias of the lexer guessed for this file.
        """
        return self.lexer.aliases[0]

    @LazyProperty
    def history(self):
        """
        Returns a list of commit for this file in which the file was changed
        """
        if self.commit is None:
            raise NodeError('Unable to get commit for this FileNode')
        return self.commit.get_path_history(self.path)

    @LazyProperty
    def annotate(self):
        """
        Returns a list of three element tuples with lineno, commit and line
        """
        if self.commit is None:
            raise NodeError('Unable to get commit for this FileNode')
        pre_load = ["author", "date", "message", "parents"]
        return self.commit.get_file_annotate(self.path, pre_load=pre_load)

    @LazyProperty
    def state(self):
        # state relative to the attached commit: ADDED/CHANGED/NOT_CHANGED
        if not self.commit:
            raise NodeError(
                "Cannot check state of the node if it's not "
                "linked with commit")
        elif self.path in (node.path for node in self.commit.added):
            return NodeState.ADDED
        elif self.path in (node.path for node in self.commit.changed):
            return NodeState.CHANGED
        else:
            return NodeState.NOT_CHANGED

    @LazyProperty
    def is_binary(self):
        """
        Returns True if file has binary content.
        """
        if self.commit:
            return self.commit.is_node_binary(self.path)
        else:
            # NUL byte heuristic on locally-held content; may return
            # ''/None (falsy) rather than strict False for empty content
            raw_bytes = self._content
            return raw_bytes and '\0' in raw_bytes

    @LazyProperty
    def extension(self):
        """Returns filenode extension"""
        return self.name.split('.')[-1]

    @property
    def is_executable(self):
        """
        Returns ``True`` if file has executable flag turned on.
        """
        return bool(self.mode & stat.S_IXUSR)

    def get_largefile_node(self):
        """
        Try to return a Mercurial FileNode from this node. It does internal
        checks inside largefile store, if that file exist there it will
        create special instance of LargeFileNode which can get content from
        LF store.
        """
        if self.commit:
            return self.commit.get_largefile_node(self.path)

    def count_lines(self, content, count_empty=False):
        # NOTE(review): with count_empty=True the return value is
        # (all_lines, non_empty_lines) despite the local variable naming;
        # without it both tuple members are simply the newline count.

        if count_empty:
            all_lines = 0
            empty_lines = 0
            for line in content.splitlines(True):
                if line == '\n':
                    empty_lines += 1
                all_lines += 1

            return all_lines, all_lines - empty_lines
        else:
            # fast method
            empty_lines = all_lines = content.count('\n')
            if all_lines == 0 and content:
                # one-line without a newline
                empty_lines = all_lines = 1

            return all_lines, empty_lines

    def lines(self, count_empty=False):
        # line counts for textual files; binary files report (0, 0)
        all_lines, empty_lines = 0, 0

        if not self.is_binary:
            content = self.content
            all_lines, empty_lines = self.count_lines(content, count_empty=count_empty)
        return all_lines, empty_lines

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))
641 641
642 642
class RemovedFileNode(FileNode):
    """
    Dummy FileNode class - trying to access any public attribute except path,
    name, kind or state (or methods/attributes checking those two) would raise
    RemovedFileNodeError.
    """
    # the only public attributes that remain accessible on a removed-file
    # placeholder; everything else raises RemovedFileNodeError
    ALLOWED_ATTRIBUTES = [
        'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
        'added', 'changed', 'not_changed', 'removed'
    ]

    def __init__(self, path):
        """
        :param path: relative path to the node
        """
        super(RemovedFileNode, self).__init__(path=path)

    def __getattribute__(self, attr):
        # private attributes and the whitelist pass through; anything else
        # is an error because the file no longer exists in the commit
        if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
            return super(RemovedFileNode, self).__getattribute__(attr)
        raise RemovedFileNodeError(
            "Cannot access attribute %s on RemovedFileNode" % attr)

    @LazyProperty
    def state(self):
        # a removed node is, by definition, always in REMOVED state
        return NodeState.REMOVED
669 669
670 670
class DirNode(Node):
    """
    DirNode stores list of files and directories within this node.
    Nodes may be used standalone but within repository context they
    lazily fetch data within same repository's commit.
    """

    def __init__(self, path, nodes=(), commit=None):
        """
        Only one of ``nodes`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param nodes: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        """
        if nodes and commit:
            raise NodeError("Cannot use both nodes and commit")
        super(DirNode, self).__init__(path, NodeKind.DIR)
        self.commit = commit
        self._nodes = nodes

    @LazyProperty
    def content(self):
        # directories have no content; accessing it is a programming error
        raise NodeError(
            "%s represents a dir and has no `content` attribute" % self)

    @LazyProperty
    def nodes(self):
        if self.commit:
            nodes = self.commit.get_nodes(self.path)
        else:
            nodes = self._nodes
        # side effect: builds the path->node lookup used by get_node()
        self._nodes_dict = dict((node.path, node) for node in nodes)
        return sorted(nodes)

    @LazyProperty
    def files(self):
        # file-kind children only, sorted
        return sorted((node for node in self.nodes if node.is_file()))

    @LazyProperty
    def dirs(self):
        # dir-kind children only, sorted
        return sorted((node for node in self.nodes if node.is_dir()))

    def __iter__(self):
        for node in self.nodes:
            yield node

    def get_node(self, path):
        """
        Returns node from within this particular ``DirNode``, so it is now
        allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
        'docs'. In order to access deeper nodes one must fetch nodes between
        them first - this would work::

           docs = root.get_node('docs')
           docs.get_node('api').get_node('index.rst')

        :param: path - relative to the current node

        .. note::
           To access lazily (as in example above) node have to be initialized
           with related commit object - without it node is out of
           context and may know nothing about anything else than nearest
           (located at same level) nodes.
        """
        try:
            path = path.rstrip('/')
            if path == '':
                raise NodeError("Cannot retrieve node without path")
            self.nodes  # access nodes first in order to set _nodes_dict
            paths = path.split('/')
            if len(paths) == 1:
                # direct child: qualify with our own path unless we are root
                if not self.is_root():
                    path = '/'.join((self.path, paths[0]))
                else:
                    path = paths[0]
                return self._nodes_dict[path]
            elif len(paths) > 1:
                if self.commit is None:
                    raise NodeError("Cannot access deeper nodes without commit")
                else:
                    # recurse one level at a time through intermediate dirs
                    path1, path2 = paths[0], '/'.join(paths[1:])
                    return self.get_node(path1).get_node(path2)
            else:
                raise KeyError
        except KeyError:
            raise NodeError("Node does not exist at %s" % path)

    @LazyProperty
    def state(self):
        raise NodeError("Cannot access state of DirNode")

    @LazyProperty
    def size(self):
        # NOTE(review): assumes ``self.commit`` is set; a standalone DirNode
        # (constructed with ``nodes``) would fail here — confirm callers
        # always attach a commit before reading size
        size = 0
        for root, dirs, files in self.commit.walk(self.path):
            for f in files:
                size += f.size

        return size

    @LazyProperty
    def last_commit(self):
        if self.commit:
            pre_load = ["author", "date", "message", "parents"]
            return self.commit.get_path_commit(self.path, pre_load=pre_load)
        raise NodeError(
            "Cannot retrieve last commit of the file without "
            "related commit attribute")

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))
785 785
786 786
class RootNode(DirNode):
    """
    The repository root: a ``DirNode`` anchored at the empty path.
    """

    def __init__(self, nodes=(), commit=None):
        super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)

    def __repr__(self):
        cls_name = self.__class__.__name__
        return '<%s>' % cls_name
797 797
798 798
class SubModuleNode(Node):
    """
    represents a SubModule of Git or SubRepo of Mercurial
    """
    # submodule content is not inspectable from this repository's view
    is_binary = False
    size = 0

    def __init__(self, name, url=None, commit=None, alias=None):
        # NOTE: intentionally skips Node.__init__; path/kind are set directly
        self.path = name
        self.kind = NodeKind.SUBMODULE
        self.alias = alias

        # we have to use EmptyCommit here since this can point to svn/git/hg
        # submodules we cannot get from repository
        self.commit = EmptyCommit(str(commit), alias=alias)
        self.url = url or self._extract_submodule_url()

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))

    def _extract_submodule_url(self):
        # TODO: find a way to parse gits submodule file and extract the
        # linking URL
        return self.path

    @LazyProperty
    def name(self):
        """
        Returns name of the node so if its path
        then only last part is returned.
        """
        org = safe_unicode(self.path.rstrip('/').split('/')[-1])
        return u'%s @ %s' % (org, self.commit.short_id)
833 833
834 834
class LargeFileNode(FileNode):
    # FileNode whose content lives in the local largefile store rather than
    # inside the repository itself; ``path`` here is system-absolute.

    def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
        # NOTE: intentionally skips FileNode.__init__; attributes set directly.
        # org_path keeps the original in-repository path for display.
        self.path = path
        self.org_path = org_path
        self.kind = NodeKind.LARGEFILE
        self.alias = alias
        self._content = ''

    def _validate_path(self, path):
        """
        we override check since the LargeFileNode path is system absolute
        """
        pass

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.path)

    @LazyProperty
    def size(self):
        # size is read straight from the file in the largefile store
        return os.stat(self.path).st_size

    @LazyProperty
    def raw_bytes(self):
        # whole-file read from the store
        with open(self.path, 'rb') as f:
            content = f.read()
        return content

    @LazyProperty
    def name(self):
        """
        Overwrites name to be the org lf path
        """
        return self.org_path

    def stream_bytes(self):
        # stream the stored file in 16kB chunks to avoid loading it in memory
        with open(self.path, 'rb') as stream:
            while True:
                data = stream.read(16 * 1024)
                if not data:
                    break
                yield data
@@ -1,5836 +1,5836 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import string
29 29 import hashlib
30 30 import logging
31 31 import datetime
32 32 import uuid
33 33 import warnings
34 34 import ipaddress
35 35 import functools
36 36 import traceback
37 37 import collections
38 38
39 39 from sqlalchemy import (
40 40 or_, and_, not_, func, cast, TypeDecorator, event,
41 41 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
42 42 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
43 43 Text, Float, PickleType, BigInteger)
44 44 from sqlalchemy.sql.expression import true, false, case
45 45 from sqlalchemy.sql.functions import coalesce, count # pragma: no cover
46 46 from sqlalchemy.orm import (
47 47 relationship, joinedload, class_mapper, validates, aliased)
48 48 from sqlalchemy.ext.declarative import declared_attr
49 49 from sqlalchemy.ext.hybrid import hybrid_property
50 50 from sqlalchemy.exc import IntegrityError # pragma: no cover
51 51 from sqlalchemy.dialects.mysql import LONGTEXT
52 52 from zope.cachedescriptors.property import Lazy as LazyProperty
53 53 from pyramid import compat
54 54 from pyramid.threadlocal import get_current_request
55 55 from webhelpers2.text import remove_formatting
56 56
57 57 from rhodecode.translation import _
58 58 from rhodecode.lib.vcs import get_vcs_instance, VCSError
59 59 from rhodecode.lib.vcs.backends.base import (
60 60 EmptyCommit, Reference, unicode_to_reference, reference_to_unicode)
61 61 from rhodecode.lib.utils2 import (
62 62 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
63 63 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
64 64 glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time, OrderedDefaultDict)
65 65 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
66 66 JsonRaw
67 67 from rhodecode.lib.ext_json import json
68 68 from rhodecode.lib.caching_query import FromCache
69 69 from rhodecode.lib.encrypt import AESCipher, validate_and_get_enc_data
70 70 from rhodecode.lib.encrypt2 import Encryptor
71 71 from rhodecode.lib.exceptions import (
72 72 ArtifactMetadataDuplicate, ArtifactMetadataBadValueType)
73 73 from rhodecode.model.meta import Base, Session
74 74
75 75 URL_SEP = '/'
76 76 log = logging.getLogger(__name__)
77 77
78 78 # =============================================================================
79 79 # BASE CLASSES
80 80 # =============================================================================
81 81
82 82 # this is propagated from .ini file rhodecode.encrypted_values.secret or
83 83 # beaker.session.secret if first is not set.
84 84 # and initialized at environment.py
85 85 ENCRYPTION_KEY = None
86 86
87 87 # used to sort permissions by types, '#' used here is not allowed to be in
88 88 # usernames, and it's very early in sorted string.printable table.
89 89 PERMISSION_TYPE_SORT = {
90 90 'admin': '####',
91 91 'write': '###',
92 92 'read': '##',
93 93 'none': '#',
94 94 }
95 95
96 96
def display_user_sort(obj):
    """
    Sort function used to sort permissions in .permissions() function of
    Repository, RepoGroup, UserGroup. Also it put the default user in front
    of all other resources
    """

    # the default user is pinned to the very front of the listing
    if obj.username == User.DEFAULT_USER:
        return '#####'

    perm_level = obj.permission.split('.')[-1]
    prefix = PERMISSION_TYPE_SORT.get(perm_level, '')

    # NOTE(dan): inactive duplicates goes last
    suffix = '9' if getattr(obj, 'duplicate_perm', None) else '1'
    return prefix + suffix + obj.username
113 113
114 114
def display_user_group_sort(obj):
    """
    Sort function used to sort permissions in .permissions() function of
    Repository, RepoGroup, UserGroup. Also it put the default user in front
    of all other resources
    """

    perm_level = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_level, '') + obj.users_group_name
124 124
125 125
def _hash_key(k):
    # module-private helper: stable sha1-based digest for cache keys
    return sha1_safe(k)
128 128
129 129
def in_filter_generator(qry, items, limit=500):
    """
    Splits IN() into multiple with OR
    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    # slice the input into `limit`-sized chunks and build one IN() per chunk
    return [
        qry.in_(items[start: start + limit])
        for start in range(0, len(items), limit)
    ]
151 151
152 152
153 153 base_table_args = {
154 154 'extend_existing': True,
155 155 'mysql_engine': 'InnoDB',
156 156 'mysql_charset': 'utf8',
157 157 'sqlite_autoincrement': True
158 158 }
159 159
160 160
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        """
        Setter for storing value: encrypts plain text with the configured
        algorithm ('aes' default, or 'fernet') before it hits the database.
        """
        import rhodecode
        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        if value.startswith('enc$aes$') \
                or value.startswith('enc$aes_hmac$') \
                or value.startswith('enc2$'):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        if algo == 'aes':
            return 'enc$aes_hmac$%s' % AESCipher(ENCRYPTION_KEY, hmac=True).encrypt(value)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).encrypt(value)
        else:
            # BUG FIX: the ValueError was previously constructed but never
            # raised, silently returning None and losing the value.
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))

    def process_result_value(self, value, dialect):
        """
        Getter for retrieving value: decrypts stored data with the configured
        algorithm; 'strict' mode controls error behavior of the AES path.
        """

        import rhodecode
        if not value:
            return value

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        enc_strict_mode = str2bool(rhodecode.CONFIG.get('rhodecode.encrypted_values.strict') or True)
        if algo == 'aes':
            decrypted_data = validate_and_get_enc_data(value, ENCRYPTION_KEY, enc_strict_mode)
        elif algo == 'fernet':
            return Encryptor(ENCRYPTION_KEY).decrypt(value)
        else:
            # BUG FIX: previously the ValueError was not raised, which led to
            # a NameError on `decrypted_data` below for unknown algorithms.
            raise ValueError('Bad encryption algorithm, should be fernet or aes, got: {}'.format(algo))
        return decrypted_data
213 213
214 214
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        # base query for this model bound to the thread-local session
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        # fetch by primary key; returns None for falsy id_ or missing row
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """fetch by primary key or raise pyramid HTTPNotFound"""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        # NOTE(review): if id_ does not exist, `obj` is None and
        # Session().delete(None) will raise — confirm callers pass valid ids
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        # look up an already-loaded instance in the session identity map by
        # attribute value, avoiding a database round-trip
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            # more than one match indicates inconsistent data; log loudly
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
319 319
320 320
class RhodeCodeSetting(Base, BaseModel):
    # global application settings stored as typed key/value pairs
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # converters applied when reading a value, keyed by base setting type
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # stored values must always be unicode (py2); the setter enforces this
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # the type may carry a modifier suffix, e.g. 'unicode.encrypted'
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only the part before the first '.' must be a known base type
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        # all settings whose name starts with the given prefix
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
405 405
406 406
class RhodeCodeUi(Base, BaseModel):
    # global vcs .ini-style settings (hook wiring, svn patterns, etc.)
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # hooks installed and managed by RhodeCode itself
    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
454 454
455 455
class RepoRhodeCodeSetting(Base, BaseModel):
    # per-repository override of RhodeCodeSetting entries
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # stored values must always be unicode (py2); the setter enforces this
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        # convert the stored unicode value back to its declared type;
        # unlike RhodeCodeSetting, no 'encrypted' modifier handling here
        v = self._app_settings_value
        type_ = self.app_settings_type
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # note: exact membership check here, no '.' modifier suffix support
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
529 529
530 530
class RepoRhodeCodeUi(Base, BaseModel):
    # per-repository override of RhodeCodeUi entries
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
561 561
562 562
563 563 class User(Base, BaseModel):
564 564 __tablename__ = 'users'
565 565 __table_args__ = (
566 566 UniqueConstraint('username'), UniqueConstraint('email'),
567 567 Index('u_username_idx', 'username'),
568 568 Index('u_email_idx', 'email'),
569 569 base_table_args
570 570 )
571 571
572 572 DEFAULT_USER = 'default'
573 573 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
574 574 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
575 575
576 576 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
577 577 username = Column("username", String(255), nullable=True, unique=None, default=None)
578 578 password = Column("password", String(255), nullable=True, unique=None, default=None)
579 579 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
580 580 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
581 581 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
582 582 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
583 583 _email = Column("email", String(255), nullable=True, unique=None, default=None)
584 584 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
585 585 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
586 586 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
587 587
588 588 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
589 589 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
590 590 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
591 591 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
592 592 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
593 593 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
594 594
595 595 user_log = relationship('UserLog')
596 596 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan')
597 597
598 598 repositories = relationship('Repository')
599 599 repository_groups = relationship('RepoGroup')
600 600 user_groups = relationship('UserGroup')
601 601
602 602 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
603 603 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
604 604
605 605 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan')
606 606 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
607 607 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan')
608 608
609 609 group_member = relationship('UserGroupMember', cascade='all')
610 610
611 611 notifications = relationship('UserNotification', cascade='all')
612 612 # notifications assigned to this user
613 613 user_created_notifications = relationship('Notification', cascade='all')
614 614 # comments created by this user
615 615 user_comments = relationship('ChangesetComment', cascade='all')
616 616 # user profile extra info
617 617 user_emails = relationship('UserEmailMap', cascade='all')
618 618 user_ip_map = relationship('UserIpMap', cascade='all')
619 619 user_auth_tokens = relationship('UserApiKeys', cascade='all')
620 620 user_ssh_keys = relationship('UserSshKeys', cascade='all')
621 621
622 622 # gists
623 623 user_gists = relationship('Gist', cascade='all')
624 624 # user pull requests
625 625 user_pull_requests = relationship('PullRequest', cascade='all')
626 626
627 627 # external identities
628 628 external_identities = relationship(
629 629 'ExternalIdentity',
630 630 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
631 631 cascade='all')
632 632 # review rules
633 633 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
634 634
635 635 # artifacts owned
636 636 artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id')
637 637
638 638 # no cascade, set NULL
639 639 scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id')
640 640
641 641 def __unicode__(self):
642 642 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
643 643 self.user_id, self.username)
644 644
645 645 @hybrid_property
646 646 def email(self):
647 647 return self._email
648 648
649 649 @email.setter
650 650 def email(self, val):
651 651 self._email = val.lower() if val else None
652 652
653 653 @hybrid_property
654 654 def first_name(self):
655 655 from rhodecode.lib import helpers as h
656 656 if self.name:
657 657 return h.escape(self.name)
658 658 return self.name
659 659
660 660 @hybrid_property
661 661 def last_name(self):
662 662 from rhodecode.lib import helpers as h
663 663 if self.lastname:
664 664 return h.escape(self.lastname)
665 665 return self.lastname
666 666
667 667 @hybrid_property
668 668 def api_key(self):
669 669 """
670 670 Fetch if exist an auth-token with role ALL connected to this user
671 671 """
672 672 user_auth_token = UserApiKeys.query()\
673 673 .filter(UserApiKeys.user_id == self.user_id)\
674 674 .filter(or_(UserApiKeys.expires == -1,
675 675 UserApiKeys.expires >= time.time()))\
676 676 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
677 677 if user_auth_token:
678 678 user_auth_token = user_auth_token.api_key
679 679
680 680 return user_auth_token
681 681
682 682 @api_key.setter
683 683 def api_key(self, val):
684 684 # don't allow to set API key this is deprecated for now
685 685 self._api_key = None
686 686
687 687 @property
688 688 def reviewer_pull_requests(self):
689 689 return PullRequestReviewers.query() \
690 690 .options(joinedload(PullRequestReviewers.pull_request)) \
691 691 .filter(PullRequestReviewers.user_id == self.user_id) \
692 692 .all()
693 693
694 694 @property
695 695 def firstname(self):
696 696 # alias for future
697 697 return self.name
698 698
699 699 @property
700 700 def emails(self):
701 701 other = UserEmailMap.query()\
702 702 .filter(UserEmailMap.user == self) \
703 703 .order_by(UserEmailMap.email_id.asc()) \
704 704 .all()
705 705 return [self.email] + [x.email for x in other]
706 706
707 707 def emails_cached(self):
708 708 emails = UserEmailMap.query()\
709 709 .filter(UserEmailMap.user == self) \
710 710 .order_by(UserEmailMap.email_id.asc())
711 711
712 712 emails = emails.options(
713 713 FromCache("sql_cache_short", "get_user_{}_emails".format(self.user_id))
714 714 )
715 715
716 716 return [self.email] + [x.email for x in emails]
717 717
718 718 @property
719 719 def auth_tokens(self):
720 720 auth_tokens = self.get_auth_tokens()
721 721 return [x.api_key for x in auth_tokens]
722 722
723 723 def get_auth_tokens(self):
724 724 return UserApiKeys.query()\
725 725 .filter(UserApiKeys.user == self)\
726 726 .order_by(UserApiKeys.user_api_key_id.asc())\
727 727 .all()
728 728
729 729 @LazyProperty
730 730 def feed_token(self):
731 731 return self.get_feed_token()
732 732
733 733 def get_feed_token(self, cache=True):
734 734 feed_tokens = UserApiKeys.query()\
735 735 .filter(UserApiKeys.user == self)\
736 736 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
737 737 if cache:
738 738 feed_tokens = feed_tokens.options(
739 739 FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))
740 740
741 741 feed_tokens = feed_tokens.all()
742 742 if feed_tokens:
743 743 return feed_tokens[0].api_key
744 744 return 'NO_FEED_TOKEN_AVAILABLE'
745 745
746 746 @LazyProperty
747 747 def artifact_token(self):
748 748 return self.get_artifact_token()
749 749
750 750 def get_artifact_token(self, cache=True):
751 751 artifacts_tokens = UserApiKeys.query()\
752 752 .filter(UserApiKeys.user == self) \
753 753 .filter(or_(UserApiKeys.expires == -1,
754 754 UserApiKeys.expires >= time.time())) \
755 755 .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
756 756
757 757 if cache:
758 758 artifacts_tokens = artifacts_tokens.options(
759 759 FromCache("sql_cache_short", "get_user_artifact_token_%s" % self.user_id))
760 760
761 761 artifacts_tokens = artifacts_tokens.all()
762 762 if artifacts_tokens:
763 763 return artifacts_tokens[0].api_key
764 764 return 'NO_ARTIFACT_TOKEN_AVAILABLE'
765 765
    def get_or_create_artifact_token(self):
        """
        Return an existing non-expired artifact-download auth token for this
        user, creating (and committing) a new one when none exists yet.
        """
        artifacts_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self) \
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time())) \
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)

        artifacts_tokens = artifacts_tokens.all()
        if artifacts_tokens:
            return artifacts_tokens[0].api_key
        else:
            from rhodecode.model.auth_token import AuthTokenModel
            artifact_token = AuthTokenModel().create(
                self, 'auto-generated-artifact-token',
                lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD)
            # NOTE(review): other code in this module commits via
            # Session().commit(); `Session.commit()` only works if Session is
            # a scoped_session with method proxying — confirm this is intended
            Session.commit()
            return artifact_token.api_key
783 783
784 784 @classmethod
785 785 def get(cls, user_id, cache=False):
786 786 if not user_id:
787 787 return
788 788
789 789 user = cls.query()
790 790 if cache:
791 791 user = user.options(
792 792 FromCache("sql_cache_short", "get_users_%s" % user_id))
793 793 return user.get(user_id)
794 794
795 795 @classmethod
796 796 def extra_valid_auth_tokens(cls, user, role=None):
797 797 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
798 798 .filter(or_(UserApiKeys.expires == -1,
799 799 UserApiKeys.expires >= time.time()))
800 800 if role:
801 801 tokens = tokens.filter(or_(UserApiKeys.role == role,
802 802 UserApiKeys.role == UserApiKeys.ROLE_ALL))
803 803 return tokens.all()
804 804
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check `auth_token` against this user's non-expired tokens with any of
        the given `roles` (ROLE_ALL is always accepted). Returns True/False;
        a repo-scoped token additionally requires `scope_repo_id` to match.
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # split tokens into plain-text and hashed; hashed checks are costly so
        # they run only if no plain token matched
        crypto_backend = auth.crypto_backend()
        enc_token_map = {}
        plain_token_map = {}
        for token in tokens_q:
            if token.api_key.startswith(crypto_backend.ENC_PREF):
                enc_token_map[token.api_key] = token
            else:
                plain_token_map[token.api_key] = token
        log.debug(
            'Found %s plain and %s encrypted tokens to check for authentication for this user',
            len(plain_token_map), len(enc_token_map))

        # plain token match comes first
        match = plain_token_map.get(auth_token)

        # check encrypted tokens now
        if not match:
            for token_hash, token in enc_token_map.items():
                # NOTE(marcink): this is expensive to calculate, but most secure
                if crypto_backend.hash_check(auth_token, token_hash):
                    match = token
                    break

        if match:
            log.debug('Found matching token %s', match)
            if match.repo_id:
                # repo-scoped token: only valid for its own repository
                log.debug('Found scope, checking for scope match of token %s', match)
                if match.repo_id == scope_repo_id:
                    return True
                else:
                    log.debug(
                        'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        match.repo, scope_repo_id)
                    return False
            else:
                return True

        return False
861 861
862 862 @property
863 863 def ip_addresses(self):
864 864 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
865 865 return [x.ip_addr for x in ret]
866 866
867 867 @property
868 868 def username_and_name(self):
869 869 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
870 870
871 871 @property
872 872 def username_or_name_or_email(self):
873 873 full_name = self.full_name if self.full_name is not ' ' else None
874 874 return self.username or full_name or self.email
875 875
876 876 @property
877 877 def full_name(self):
878 878 return '%s %s' % (self.first_name, self.last_name)
879 879
880 880 @property
881 881 def full_name_or_username(self):
882 882 return ('%s %s' % (self.first_name, self.last_name)
883 883 if (self.first_name and self.last_name) else self.username)
884 884
885 885 @property
886 886 def full_contact(self):
887 887 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
888 888
889 889 @property
890 890 def short_contact(self):
891 891 return '%s %s' % (self.first_name, self.last_name)
892 892
    @property
    def is_admin(self):
        """Whether this account has the super-admin flag set."""
        return self.admin
896 896
    @property
    def language(self):
        # preferred UI language, stored inside the JSON-backed user_data
        # column; None when never set
        return self.user_data.get('language')
900 900
    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user

        :param kwargs: extra keyword arguments forwarded to the AuthUser
            constructor
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
907 907
    @hybrid_property
    def user_data(self):
        # JSON-decoded view over the ``_user_data`` column; an empty or
        # undecodable payload yields an empty dict
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        # serialize ``val`` (must be a dict) into the ``_user_data`` column;
        # serialization failures are logged and leave the column untouched
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())
926 926
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Fetch a user by username, or None.

        :param username: username to look up
        :param case_insensitive: compare usernames lowercased
        :param cache: use the short SQL cache for the query
        :param identity_cache: prefer the identity-map based cache; falls
            through to a regular (uncached) query on a miss
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
949 949
    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        """
        Return the owner (User) of a non-expired auth token, or None.

        NOTE(review): this compares the raw ``api_key`` column for equality,
        so only plaintext-stored tokens can match here; encrypted tokens are
        handled by ``authenticate_by_token`` — confirm callers expect that.

        :param auth_token: token string to look up
        :param cache: use the short SQL cache for the query
        """
        q = UserApiKeys.query()\
            .filter(UserApiKeys.api_key == auth_token)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))

        match = q.first()
        if match:
            return match.user
963 963
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Fetch a user by email, or None.

        Looks at the main user table first; on a miss, falls back to the
        alternative addresses registered in ``UserEmailMap``.

        :param email: email address to look up
        :param case_insensitive: compare emails lowercased
        :param cache: use the short SQL cache for both queries
        """

        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            # map rows join to their owning user; None when no row matched
            ret = getattr(q.scalar(), 'user', None)

        return ret
993 993
    @classmethod
    def get_from_cs_author(cls, author):
        """
        Tries to get User objects out of commit author string

        Matching is attempted first via the email embedded in the author
        string, then via the author name treated as a username; returns
        None when nothing matches.

        :param author: commit author string, e.g. ``Name <email>``
        """
        from rhodecode.lib.helpers import email, author_name
        # Valid email in the attribute passed, see if they're in the system
        _email = email(author)
        if _email:
            user = cls.get_by_email(_email, case_insensitive=True)
            if user:
                return user
        # Maybe we can match by username?
        _author = author_name(author)
        user = cls.get_by_username(_author, case_insensitive=True)
        if user:
            return user
1013 1013
1014 1014 def update_userdata(self, **kwargs):
1015 1015 usr = self
1016 1016 old = usr.user_data
1017 1017 old.update(**kwargs)
1018 1018 usr.user_data = old
1019 1019 Session().add(usr)
1020 1020 log.debug('updated userdata with %s', kwargs)
1021 1021
    def update_lastlogin(self):
        """Stamp ``last_login`` with the current time; caller must commit."""
        self.last_login = datetime.datetime.now()
        Session().add(self)
        log.debug('updated user %s lastlogin', self.username)
1027 1027
    def update_password(self, new_password):
        """Hash ``new_password`` and store it; caller must commit."""
        from rhodecode.lib.auth import get_crypt_password

        self.password = get_crypt_password(new_password)
        Session().add(self)
1033 1033
1034 1034 @classmethod
1035 1035 def get_first_super_admin(cls):
1036 1036 user = User.query()\
1037 1037 .filter(User.admin == true()) \
1038 1038 .order_by(User.user_id.asc()) \
1039 1039 .first()
1040 1040
1041 1041 if user is None:
1042 1042 raise Exception('FATAL: Missing administrative account!')
1043 1043 return user
1044 1044
1045 1045 @classmethod
1046 1046 def get_all_super_admins(cls, only_active=False):
1047 1047 """
1048 1048 Returns all admin accounts sorted by username
1049 1049 """
1050 1050 qry = User.query().filter(User.admin == true()).order_by(User.username.asc())
1051 1051 if only_active:
1052 1052 qry = qry.filter(User.active == true())
1053 1053 return qry.all()
1054 1054
1055 1055 @classmethod
1056 1056 def get_all_user_ids(cls, only_active=True):
1057 1057 """
1058 1058 Returns all users IDs
1059 1059 """
1060 1060 qry = Session().query(User.user_id)
1061 1061
1062 1062 if only_active:
1063 1063 qry = qry.filter(User.active == true())
1064 1064 return [x.user_id for x in qry]
1065 1065
    @classmethod
    def get_default_user(cls, cache=False, refresh=False):
        """
        Return the special default (anonymous) account.

        :param cache: use the short SQL cache for the lookup
        :param refresh: reload the row from the database, bypassing any
            stale cached state
        :raises Exception: when the default account is missing
        """
        user = User.get_by_username(User.DEFAULT_USER, cache=cache)
        if user is None:
            raise Exception('FATAL: Missing default account!')
        if refresh:
            # The default user might be based on outdated state which
            # has been loaded from the cache.
            # A call to refresh() ensures that the
            # latest state from the database is used.
            Session().refresh(user)
        return user
1078 1078
    @classmethod
    def get_default_user_id(cls):
        # the default user's id is kept in the global rhodecode CONFIG,
        # avoiding a database round-trip
        import rhodecode
        return rhodecode.CONFIG['default_user_id']
1083 1083
    def _get_default_perms(self, user, suffix=''):
        # delegate to PermissionModel over the given user's direct permissions
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user.user_perms, suffix)
1087 1087
    def get_default_perms(self, suffix=''):
        # convenience wrapper binding ``self`` as the inspected user
        return self._get_default_perms(self, suffix)
1090 1090
    def get_api_data(self, include_secrets=False, details='full'):
        """
        Common function for generating user related data for API

        :param include_secrets: By default secrets in the API data will be replaced
            by a placeholder value to prevent exposing this data by accident. In case
            this data shall be exposed, set this flag to ``True``.

        :param details: details can be 'basic|full' basic gives only a subset of
            the available user information that includes user_id, name and emails.
        """
        user = self
        user_data = self.user_data
        data = {
            'user_id': user.user_id,
            'username': user.username,
            'firstname': user.name,
            'lastname': user.lastname,
            'description': user.description,
            'email': user.email,
            'emails': user.emails,
        }
        if details == 'basic':
            return data

        # fixed-width placeholder shown instead of the real tokens unless
        # secrets are explicitly requested
        auth_token_length = 40
        auth_token_replacement = '*' * auth_token_length

        extras = {
            'auth_tokens': [auth_token_replacement],
            'active': user.active,
            'admin': user.admin,
            'extern_type': user.extern_type,
            'extern_name': user.extern_name,
            'last_login': user.last_login,
            'last_activity': user.last_activity,
            'ip_addresses': user.ip_addresses,
            'language': user_data.get('language')
        }
        data.update(extras)

        if include_secrets:
            data['auth_tokens'] = user.auth_tokens
        return data
1135 1135
1136 1136 def __json__(self):
1137 1137 data = {
1138 1138 'full_name': self.full_name,
1139 1139 'full_name_or_username': self.full_name_or_username,
1140 1140 'short_contact': self.short_contact,
1141 1141 'full_contact': self.full_contact,
1142 1142 }
1143 1143 data.update(self.get_api_data())
1144 1144 return data
1145 1145
1146 1146
class UserApiKeys(Base, BaseModel):
    """
    Auth tokens (historically "api keys") owned by a user.

    A token carries a role restricting what it may be used for, an
    expiry timestamp (-1 means it never expires) and an optional scope
    limiting it to a single repository or repository group.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key'),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_HTTP = 'token_role_http'
    ROLE_FEED = 'token_role_feed'
    ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download'
    # The last one is ignored in the list as we only
    # use it for one action, and cannot be created by users
    ROLE_PASSWORD_RESET = 'token_password_reset'

    ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # -1 marks a token that never expires, otherwise a unix timestamp
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        # NOTE: exposes the raw token value; use get_api_data() when
        # obfuscation is wanted
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        # API-facing dict; the token is obfuscated unless secrets are
        # explicitly requested
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        # tokens with expires == -1 never expire
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # short human-readable label for a role; unknown roles pass through
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'),
        }.get(role, role)

    @classmethod
    def _get_role_description(cls, role):
        # longer human-readable explanation of a role; unknown roles pass through
        return {
            cls.ROLE_ALL: _('Token for all actions.'),
            cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
                             'login using `api_access_controllers_whitelist` functionality.'),
            cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
                            'Requires auth_token authentication plugin to be active. <br/>'
                            'Such Token should be used then instead of a password to '
                            'interact with a repository, and additionally can be '
                            'limited to single repository using repo scope.'),
            cls.ROLE_API: _('Token limited to api calls.'),
            cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
        }.get(role, role)

    @property
    def role_humanized(self):
        return self._get_role_name(self.role)

    def _get_scope(self):
        # repo scope wins over repo-group scope; no scope means global
        if self.repo:
            return 'Repository: {}'.format(self.repo.repo_name)
        if self.repo_group:
            return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name)
        return 'Global'

    @property
    def scope_humanized(self):
        return self._get_scope()

    @property
    def token_obfuscated(self):
        # first 4 characters followed by a mask; None when no token is set
        if self.api_key:
            return self.api_key[:4] + "****"
1267 1267
1268 1268
class UserEmailMap(Base, BaseModel):
    """
    Additional (alternative) email addresses of a user. Each address is
    globally unique and must not collide with any main account email.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            raise AttributeError('email %s is present is user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are normalized to lowercase on assignment
        self._email = val.lower() if val else None
1298 1298
1299 1299
class UserIpMap(Base, BaseModel):
    """
    Per-user IP allow-list entries; ``ip_addr`` may be a single address
    or a network in CIDR notation (see ``_get_ip_range``).
    """
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        # expand an address or CIDR network into its [network, broadcast]
        # boundary addresses
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1334 1334
1335 1335
class UserSshKeys(Base, BaseModel):
    """
    SSH public keys registered for a user. Fingerprints are globally
    unique and indexed for lookup during SSH authentication.
    """
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        # NOTE: the raw key material is deliberately not exposed here
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        data = self.__json__()
        return data
1370 1370
1371 1371
class UserLog(Base, BaseModel):
    """
    Audit/journal entries. User and repository foreign keys use
    ``SET NULL`` on delete so the log survives removal of either; the
    plain-text ``username``/``repository_name`` columns preserve the
    historic values.
    """
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # journal entry schema versions
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        # alias for the primary key
        return self.user_log_id

    @property
    def action_as_day(self):
        # calendar date of the action with the time-of-day dropped
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1420 1420
1421 1421
class UserGroup(Base, BaseModel):
    """
    A named group of users. Groups can be granted permissions like a
    single user and may be synced from an external authentication
    source (see ``sync`` / ``group_data``).
    """
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    # NOTE(review): the trailing space inside the class-name string below is
    # present in the original; SQLAlchemy still resolves it, but it looks
    # unintended — confirm before cleaning up.
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        # decode the JSON blob stored in ``group_data``; empty or
        # undecodable payloads yield an empty dict
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        # SQL-side expression: the raw column
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        # serialization failures are logged and leave the column untouched
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # extern_type marks groups synced from an external auth source
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """
        Fetch a user group by name, or None.

        :param group_name: name to look up
        :param cache: use the short SQL cache for the query
        :param case_insensitive: compare names lowercased
        """
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        # fetch by primary key; returns None for a falsy id
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups

        Returns rows for the owner, the super-admins, users holding a
        direct permission and (optionally) members of user groups that
        hold a permission.

        NOTE(review): ``with_owner=False`` combined with
        ``with_admins=True`` would index into the empty ``owner_row``
        below — callers appear to always keep the defaults; confirm
        before changing them.
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        # user groups that hold a permission on this user group
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        # delegate to PermissionModel over this group's direct permissions
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        API representation of this user group.

        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.
        :param with_group_members: include per-member user api data
        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1627 1627
1628 1628
class UserGroupMember(Base, BaseModel):
    """Association row linking a user to a user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # convenience constructor taking the two foreign keys directly
        self.users_group_id = gr_id
        self.user_id = u_id
1645 1645
1646 1646
class RepositoryField(Base, BaseModel):
    """
    Custom extra fields attached to a repository; each
    (repository, field_key) pair is unique.
    """
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        # CONSISTENCY FIX: use the shared PREFIX constant instead of the
        # hard-coded 'ex_' literal so the prefix cannot drift from
        # un_prefix_key(); behavior is unchanged (PREFIX == 'ex_')
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form ``PREFIX`` from ``key`` if it is present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for ``key`` on ``repo``, or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1683 1683
1684 1684
class Repository(Base, BaseModel):
    """Main repository table: identity, state, clone URIs, flags, relations."""
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates used by clone_url() when no template is configured
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # repository creation lifecycle states (stored in repo_state)
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # sources that can acquire a repository lock (stored in the lock reason)
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # exposed via the repo_name hybrid property, which also maintains the hash
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    # remote URIs are stored encrypted at rest
    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # exposed via the landing_rev hybrid property ('<rev_type>:<rev>')
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # exposed via the locked hybrid property ('user_id:timestamp:reason')
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan")

    scoped_tokens = relationship('UserApiKeys', cascade="all")

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id')
    def __unicode__(self):
        # e.g. <Repository('4:my/repo')>
        return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
                                   safe_unicode(self.repo_name))

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe to render in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.description)
1803 1803
    @hybrid_property
    def landing_rev(self):
        # always should return [rev_type, rev], e.g ['branch', 'master']
        if self._landing_revision:
            _rev_info = self._landing_revision.split(':')
            if len(_rev_info) < 2:
                # legacy single-part value: fall back to the generic 'rev' type
                _rev_info.insert(0, 'rev')
            return [_rev_info[0], _rev_info[1]]
        return [None, None]

    @property
    def landing_ref_type(self):
        # ref type part of the landing rev, e.g. 'branch'
        return self.landing_rev[0]

    @property
    def landing_ref_name(self):
        # ref name part of the landing rev, e.g. 'master'
        return self.landing_rev[1]

    @landing_rev.setter
    def landing_rev(self, val):
        # stored in the landing_revision column as '<rev_type>:<rev>'
        if ':' not in val:
            raise ValueError('value must be delimited with `:` and consist '
                             'of <rev_type>:<rev>, got %s instead' % val)
        self._landing_revision = val
1828 1828
    @hybrid_property
    def locked(self):
        # lock state is stored as 'user_id:timestamp:reason'; yields
        # (user_id, time, reason) when locked, [None, None, None] otherwise
        if self._locked:
            user_id, timelocked, reason = self._locked.split(':')
            lock_values = int(user_id), timelocked, reason
        else:
            lock_values = [None, None, None]
        return lock_values

    @locked.setter
    def locked(self, val):
        # accepts a (user_id, time, reason) sequence; any falsy value unlocks
        if val and isinstance(val, (list, tuple)):
            self._locked = ':'.join(map(str, val))
        else:
            self._locked = None
1844 1844
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """
        Deserialize the raw JSON commit cache; returns an EmptyCommit-shaped
        dict when the cache is missing or unreadable.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # round-trip through JSON so the fallback carries the same value
            # types (e.g. serialized dates) as a real cached entry would
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            return dummy
        except Exception:
            # corrupted cache payload: log and fall back to the empty shape
            log.error(traceback.format_exc())
            return dummy

    @hybrid_property
    def changeset_cache(self):
        # cached metadata of the last commit, as a dict
        return self._load_changeset_cache(self.repo_id, self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            # best-effort: keep the previous cache rather than fail the caller
            log.error(traceback.format_exc())
1871 1871
    @hybrid_property
    def repo_name(self):
        return self._repo_name

    @repo_name.setter
    def repo_name(self, value):
        # keep a sha1 of the name for the unique, length-bounded hash column
        self._repo_name = value
        self.repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()

    @classmethod
    def normalize_repo_name(cls, repo_name):
        """
        Normalizes os specific repo_name to the format internally stored inside
        database using URL_SEP

        :param cls:
        :param repo_name: possibly OS-path-separated repository name
        """
        return cls.NAME_SEP.join(repo_name.split(os.sep))
1891 1891
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """Look a repository up by its full name, with optional caching."""
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                # session identity-map cache; only used on a hit, otherwise
                # falls through to a regular query
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                # short-lived SQL statement cache keyed on the repo name
                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()

    @classmethod
    def get_by_id_or_repo_name(cls, repoid):
        """Fetch a repository by numeric id, or by name for other inputs."""
        if isinstance(repoid, (int, long)):
            try:
                repo = cls.get(repoid)
            except ValueError:
                repo = None
        else:
            repo = cls.get_by_repo_name(repoid)
        return repo
1919 1919
    @classmethod
    def get_by_full_path(cls, repo_full_path):
        # strip the storage base path, then normalize to the stored name form
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
        repo_name = cls.normalize_repo_name(repo_name)
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))

    @classmethod
    def get_repo_forks(cls, repo_id):
        # returns a query (not a list) of repositories forked from `repo_id`
        return cls.query().filter(Repository.fork_id == repo_id)
1929 1929
    @classmethod
    def base_path(cls):
        """
        Returns base path when all repos are stored

        :param cls:
        """
        # the storage root is kept in the RhodeCodeUi table under the '/' key
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1941 1941
1942 1942 @classmethod
1943 1943 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1944 1944 case_insensitive=True, archived=False):
1945 1945 q = Repository.query()
1946 1946
1947 1947 if not archived:
1948 1948 q = q.filter(Repository.archived.isnot(true()))
1949 1949
1950 1950 if not isinstance(user_id, Optional):
1951 1951 q = q.filter(Repository.user_id == user_id)
1952 1952
1953 1953 if not isinstance(group_id, Optional):
1954 1954 q = q.filter(Repository.group_id == group_id)
1955 1955
1956 1956 if case_insensitive:
1957 1957 q = q.order_by(func.lower(Repository.repo_name))
1958 1958 else:
1959 1959 q = q.order_by(Repository.repo_name)
1960 1960
1961 1961 return q.all()
1962 1962
    @property
    def repo_uid(self):
        # stable id-based identifier, e.g. '_42'; used by clone-by-id urls
        return '_{}'.format(self.repo_id)

    @property
    def forks(self):
        """
        Return forks of this repo
        """
        return Repository.get_repo_forks(self.repo_id)

    @property
    def parent(self):
        """
        Returns fork parent
        """
        return self.fork

    @property
    def just_name(self):
        # last path segment of the full repository name
        return self.repo_name.split(self.NAME_SEP)[-1]
1984 1984
1985 1985 @property
1986 1986 def groups_with_parents(self):
1987 1987 groups = []
1988 1988 if self.group is None:
1989 1989 return groups
1990 1990
1991 1991 cur_gr = self.group
1992 1992 groups.insert(0, cur_gr)
1993 1993 while 1:
1994 1994 gr = getattr(cur_gr, 'parent_group', None)
1995 1995 cur_gr = cur_gr.parent_group
1996 1996 if gr is None:
1997 1997 break
1998 1998 groups.insert(0, gr)
1999 1999
2000 2000 return groups
2001 2001
2002 2002 @property
2003 2003 def groups_and_repo(self):
2004 2004 return self.groups_with_parents, self
2005 2005
    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value

    @property
    def repo_full_path(self):
        # absolute filesystem path of this repository
        p = [self.repo_path]
        # we need to split the name by / since this is how we store the
        # names in the database, but that eventually needs to be converted
        # into a valid system path
        p += self.repo_name.split(self.NAME_SEP)
        return os.path.join(*map(safe_unicode, p))
2025 2025
    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        return CacheKey.query()\
            .filter(CacheKey.cache_args == invalidation_namespace)\
            .order_by(CacheKey.cache_key)\
            .all()

    @property
    def cached_diffs_relative_dir(self):
        """
        Return a relative to the repository store path of cached diffs
        used for safe display for users, who shouldn't know the absolute store
        path
        """
        return os.path.join(
            os.path.dirname(self.repo_name),
            self.cached_diffs_dir.split(os.path.sep)[-1])

    @property
    def cached_diffs_dir(self):
        # hidden sibling directory of the repo storing cached diff payloads
        path = self.repo_full_path
        return os.path.join(
            os.path.dirname(path),
            '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
2055 2055
2056 2056 def cached_diffs(self):
2057 2057 diff_cache_dir = self.cached_diffs_dir
2058 2058 if os.path.isdir(diff_cache_dir):
2059 2059 return os.listdir(diff_cache_dir)
2060 2060 return []
2061 2061
2062 2062 def shadow_repos(self):
2063 2063 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
2064 2064 return [
2065 2065 x for x in os.listdir(os.path.dirname(self.repo_full_path))
2066 2066 if x.startswith(shadow_repos_pattern)]
2067 2067
    def get_new_name(self, repo_name):
        """
        returns new full repository name based on assigned group and new name

        :param repo_name: new short name of the repository
        """
        path_prefix = self.group.full_path_splitted if self.group else []
        return self.NAME_SEP.join(path_prefix + [repo_name])

    @property
    def _config(self):
        """
        Returns db based config object.
        """
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)
2084 2084
    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repositories

        :param with_admins: include rows for super-admins
        :param with_owner: include a row for the repository owner
        :param expand_from_user_groups: also expand user-group permissions
            into per-member rows
        """
        _admin_perm = 'repository.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            usr.permission_id = None
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    usr.permission_id = None
                    super_admin_rows.append(usr)

        q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
        q = q.options(joinedload(UserRepoToPerm.repository),
                      joinedload(UserRepoToPerm.user),
                      joinedload(UserRepoToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            # also check if this permission is maybe used by branch_permissions
            if _usr.branch_perm_entry:
                usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry]

            usr.permission = _usr.permission.permission_name
            usr.permission_id = _usr.repo_to_perm_id
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows
2151 2151
2152 2152 def permission_user_groups(self, with_members=True):
2153 2153 q = UserGroupRepoToPerm.query()\
2154 2154 .filter(UserGroupRepoToPerm.repository == self)
2155 2155 q = q.options(joinedload(UserGroupRepoToPerm.repository),
2156 2156 joinedload(UserGroupRepoToPerm.users_group),
2157 2157 joinedload(UserGroupRepoToPerm.permission),)
2158 2158
2159 2159 perm_rows = []
2160 2160 for _user_group in q.all():
2161 2161 entry = AttributeDict(_user_group.users_group.get_dict())
2162 2162 entry.permission = _user_group.permission.permission_name
2163 2163 if with_members:
2164 2164 entry.members = [x.user.get_dict()
2165 2165 for x in _user_group.users_group.members]
2166 2166 perm_rows.append(entry)
2167 2167
2168 2168 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2169 2169 return perm_rows
2170 2170
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # unpack lock state once; all three are None when unlocked
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose admin-defined extra fields under their 'ex_' keys
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2219 2219
    @classmethod
    def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
        """Lock `repo` for `user_id`; time defaults to now, reason to auto."""
        if not lock_time:
            lock_time = time.time()
        if not lock_reason:
            lock_reason = cls.LOCK_AUTOMATIC
        repo.locked = [user_id, lock_time, lock_reason]
        Session().add(repo)
        Session().commit()

    @classmethod
    def unlock(cls, repo):
        """Clear any lock held on `repo` and persist the change."""
        repo.locked = None
        Session().add(repo)
        Session().commit()

    @classmethod
    def getlock(cls, repo):
        """Return the (user_id, time, reason) lock state of `repo`."""
        return repo.locked
2239 2239
2240 2240 def is_user_lock(self, user_id):
2241 2241 if self.lock[0]:
2242 2242 lock_user_id = safe_int(self.lock[0])
2243 2243 user_id = safe_int(user_id)
2244 2244 # both are ints, and they are equal
2245 2245 return all([lock_user_id, user_id]) and lock_user_id == user_id
2246 2246
2247 2247 return False
2248 2248
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: 'push' or 'pull'; anything else raises ValueError
        :param user_id: id of the user performing the action
        :param only_when_enabled: when False, evaluate lock state even if
            repository locking is disabled
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            # NOTE(review): the debug message below says "read" although the
            # check above requires write/admin — looks like a wording slip.
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2311 2311
    @property
    def last_commit_cache_update_diff(self):
        # seconds elapsed since the commit cache was last refreshed
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @classmethod
    def _load_commit_change(cls, last_commit_cache):
        # parse the cached commit date; falls back to epoch 0 when the
        # date is missing or unparsable
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = last_commit_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_commit_change(self):
        # datetime of the last cached commit
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        # datetime of the last database row update
        return self.updated_on
2333 2333
2334 2334 @property
2335 2335 def clone_uri_hidden(self):
2336 2336 clone_uri = self.clone_uri
2337 2337 if clone_uri:
2338 2338 import urlobject
2339 2339 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2340 2340 if url_obj.password:
2341 2341 clone_uri = url_obj.with_password('*****')
2342 2342 return clone_uri
2343 2343
2344 2344 @property
2345 2345 def push_uri_hidden(self):
2346 2346 push_uri = self.push_uri
2347 2347 if push_uri:
2348 2348 import urlobject
2349 2349 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2350 2350 if url_obj.password:
2351 2351 push_uri = url_obj.with_password('*****')
2352 2352 return push_uri
2353 2353
    def clone_url(self, **override):
        """
        Render the clone url for this repository from the configured template.

        Recognized overrides (consumed here, not passed through):
        ``with_id`` use the id-based template, ``uri_tmpl`` force an explicit
        template, ``ssh`` use the ssh template. All remaining keys are
        forwarded to ``get_clone_url``.
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            # prefer the per-request config when available, fall back to
            # the (cached) global settings
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)

            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH

            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id,
                             repo_type=self.repo_type,
                             **override)
2393 2393
    def set_state(self, state):
        # `state` is one of the STATE_* constants; caller must commit
        self.repo_state = state
        Session().add(self)
    #==========================================================================
    # SCM PROPERTIES
    #==========================================================================
2400 2400
2401 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False):
2401 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
2402 2402 return get_commit_safe(
2403 2403 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
2404 maybe_unreachable=maybe_unreachable)
2404 maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
2405 2405
2406 2406 def get_changeset(self, rev=None, pre_load=None):
2407 2407 warnings.warn("Use get_commit", DeprecationWarning)
2408 2408 commit_id = None
2409 2409 commit_idx = None
2410 2410 if isinstance(rev, compat.string_types):
2411 2411 commit_id = rev
2412 2412 else:
2413 2413 commit_idx = rev
2414 2414 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2415 2415 pre_load=pre_load)
2416 2416
    def get_landing_commit(self):
        """
        Returns landing commit, or if that doesn't exist returns the tip
        """
        _rev_type, _rev = self.landing_rev
        commit = self.get_commit(_rev)
        if isinstance(commit, EmptyCommit):
            # configured landing ref no longer exists; fall back to tip
            return self.get_commit()
        return commit

    def flush_commit_cache(self):
        # write a dummy raw_id first so the follow-up refresh is always
        # treated as outdated, then recompute the real cache
        self.update_commit_cache(cs_cache={'raw_id':'0'})
        self.update_commit_cache()
2430 2430
2431 2431 def update_commit_cache(self, cs_cache=None, config=None):
2432 2432 """
2433 2433 Update cache of last commit for repository
2434 2434 cache_keys should be::
2435 2435
2436 2436 source_repo_id
2437 2437 short_id
2438 2438 raw_id
2439 2439 revision
2440 2440 parents
2441 2441 message
2442 2442 date
2443 2443 author
2444 2444 updated_on
2445 2445
2446 2446 """
2447 2447 from rhodecode.lib.vcs.backends.base import BaseChangeset
2448 2448 from rhodecode.lib.vcs.utils.helpers import parse_datetime
2449 2449 empty_date = datetime.datetime.fromtimestamp(0)
2450 2450
2451 2451 if cs_cache is None:
2452 2452 # use no-cache version here
2453 2453 try:
2454 2454 scm_repo = self.scm_instance(cache=False, config=config)
2455 2455 except VCSError:
2456 2456 scm_repo = None
2457 2457 empty = scm_repo is None or scm_repo.is_empty()
2458 2458
2459 2459 if not empty:
2460 2460 cs_cache = scm_repo.get_commit(
2461 2461 pre_load=["author", "date", "message", "parents", "branch"])
2462 2462 else:
2463 2463 cs_cache = EmptyCommit()
2464 2464
2465 2465 if isinstance(cs_cache, BaseChangeset):
2466 2466 cs_cache = cs_cache.__json__()
2467 2467
2468 2468 def is_outdated(new_cs_cache):
2469 2469 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2470 2470 new_cs_cache['revision'] != self.changeset_cache['revision']):
2471 2471 return True
2472 2472 return False
2473 2473
2474 2474 # check if we have maybe already latest cached revision
2475 2475 if is_outdated(cs_cache) or not self.changeset_cache:
2476 2476 _current_datetime = datetime.datetime.utcnow()
2477 2477 last_change = cs_cache.get('date') or _current_datetime
2478 2478 # we check if last update is newer than the new value
2479 2479 # if yes, we use the current timestamp instead. Imagine you get
2480 2480 # old commit pushed 1y ago, we'd set last update 1y to ago.
2481 2481 last_change_timestamp = datetime_to_time(last_change)
2482 2482 current_timestamp = datetime_to_time(last_change)
2483 2483 if last_change_timestamp > current_timestamp and not empty:
2484 2484 cs_cache['date'] = _current_datetime
2485 2485
2486 2486 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2487 2487 cs_cache['updated_on'] = time.time()
2488 2488 self.changeset_cache = cs_cache
2489 2489 self.updated_on = last_change
2490 2490 Session().add(self)
2491 2491 Session().commit()
2492 2492
2493 2493 else:
2494 2494 if empty:
2495 2495 cs_cache = EmptyCommit().__json__()
2496 2496 else:
2497 2497 cs_cache = self.changeset_cache
2498 2498
2499 2499 _date_latest = parse_datetime(cs_cache.get('date') or empty_date)
2500 2500
2501 2501 cs_cache['updated_on'] = time.time()
2502 2502 self.changeset_cache = cs_cache
2503 2503 self.updated_on = _date_latest
2504 2504 Session().add(self)
2505 2505 Session().commit()
2506 2506
2507 2507 log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s',
2508 2508 self.repo_name, cs_cache, _date_latest)
2509 2509
2510 2510 @property
2511 2511 def tip(self):
2512 2512 return self.get_commit('tip')
2513 2513
2514 2514 @property
2515 2515 def author(self):
2516 2516 return self.tip.author
2517 2517
2518 2518 @property
2519 2519 def last_change(self):
2520 2520 return self.scm_instance().last_change
2521 2521
2522 2522 def get_comments(self, revisions=None):
2523 2523 """
2524 2524 Returns comments for this repository grouped by revisions
2525 2525
2526 2526 :param revisions: filter query by revisions only
2527 2527 """
2528 2528 cmts = ChangesetComment.query()\
2529 2529 .filter(ChangesetComment.repo == self)
2530 2530 if revisions:
2531 2531 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2532 2532 grouped = collections.defaultdict(list)
2533 2533 for cmt in cmts.all():
2534 2534 grouped[cmt.revision].append(cmt)
2535 2535 return grouped
2536 2536
2537 2537 def statuses(self, revisions=None):
2538 2538 """
2539 2539 Returns statuses for this repository
2540 2540
2541 2541 :param revisions: list of revisions to get statuses for
2542 2542 """
2543 2543 statuses = ChangesetStatus.query()\
2544 2544 .filter(ChangesetStatus.repo == self)\
2545 2545 .filter(ChangesetStatus.version == 0)
2546 2546
2547 2547 if revisions:
2548 2548 # Try doing the filtering in chunks to avoid hitting limits
2549 2549 size = 500
2550 2550 status_results = []
2551 2551 for chunk in xrange(0, len(revisions), size):
2552 2552 status_results += statuses.filter(
2553 2553 ChangesetStatus.revision.in_(
2554 2554 revisions[chunk: chunk+size])
2555 2555 ).all()
2556 2556 else:
2557 2557 status_results = statuses.all()
2558 2558
2559 2559 grouped = {}
2560 2560
2561 2561 # maybe we have open new pullrequest without a status?
2562 2562 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2563 2563 status_lbl = ChangesetStatus.get_status_lbl(stat)
2564 2564 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2565 2565 for rev in pr.revisions:
2566 2566 pr_id = pr.pull_request_id
2567 2567 pr_repo = pr.target_repo.repo_name
2568 2568 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2569 2569
2570 2570 for stat in status_results:
2571 2571 pr_id = pr_repo = None
2572 2572 if stat.pull_request:
2573 2573 pr_id = stat.pull_request.pull_request_id
2574 2574 pr_repo = stat.pull_request.target_repo.repo_name
2575 2575 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2576 2576 pr_id, pr_repo]
2577 2577 return grouped
2578 2578
2579 2579 # ==========================================================================
2580 2580 # SCM CACHE INSTANCE
2581 2581 # ==========================================================================
2582 2582
2583 2583 def scm_instance(self, **kwargs):
2584 2584 import rhodecode
2585 2585
2586 2586 # Passing a config will not hit the cache currently only used
2587 2587 # for repo2dbmapper
2588 2588 config = kwargs.pop('config', None)
2589 2589 cache = kwargs.pop('cache', None)
2590 2590 vcs_full_cache = kwargs.pop('vcs_full_cache', None)
2591 2591 if vcs_full_cache is not None:
2592 2592 # allows override global config
2593 2593 full_cache = vcs_full_cache
2594 2594 else:
2595 2595 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2596 2596 # if cache is NOT defined use default global, else we have a full
2597 2597 # control over cache behaviour
2598 2598 if cache is None and full_cache and not config:
2599 2599 log.debug('Initializing pure cached instance for %s', self.repo_path)
2600 2600 return self._get_instance_cached()
2601 2601
2602 2602 # cache here is sent to the "vcs server"
2603 2603 return self._get_instance(cache=bool(cache), config=config)
2604 2604
    def _get_instance_cached(self):
        """
        Return a vcs instance through the long-term dogpile cache region,
        re-computing it only when the invalidation context signals that the
        cached state is stale.
        """
        from rhodecode.lib import rc_cache

        # cache namespace is per-repository; the invalidation namespace is
        # derived from the repo_id as well
        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        # all three arguments participate in the cache key; `_cache_state_uid`
        # is forwarded so the wire knows which repo state it represents
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id, context_id, _cache_state_uid):
            return self._get_instance(repo_state_uid=_cache_state_uid)

        # we must use thread scoped cache here,
        # because each thread of gevent needs it's own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.cache_data['cache_state_uid']
            args = (self.repo_id, inv_context_manager.cache_key, cache_state_uid)

            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

        log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
        return instance
2635 2635
2636 2636 def _get_instance(self, cache=True, config=None, repo_state_uid=None):
2637 2637 log.debug('Initializing %s instance `%s` with cache flag set to: %s',
2638 2638 self.repo_type, self.repo_path, cache)
2639 2639 config = config or self._config
2640 2640 custom_wire = {
2641 2641 'cache': cache, # controls the vcs.remote cache
2642 2642 'repo_state_uid': repo_state_uid
2643 2643 }
2644 2644 repo = get_vcs_instance(
2645 2645 repo_path=safe_str(self.repo_full_path),
2646 2646 config=config,
2647 2647 with_wire=custom_wire,
2648 2648 create=False,
2649 2649 _vcs_alias=self.repo_type)
2650 2650 if repo is not None:
2651 2651 repo.count() # cache rebuild
2652 2652 return repo
2653 2653
2654 2654 def get_shadow_repository_path(self, workspace_id):
2655 2655 from rhodecode.lib.vcs.backends.base import BaseRepository
2656 2656 shadow_repo_path = BaseRepository._get_shadow_repository_path(
2657 2657 self.repo_full_path, self.repo_id, workspace_id)
2658 2658 return shadow_repo_path
2659 2659
2660 2660 def __json__(self):
2661 2661 return {'landing_rev': self.landing_rev}
2662 2662
2663 2663 def get_dict(self):
2664 2664
2665 2665 # Since we transformed `repo_name` to a hybrid property, we need to
2666 2666 # keep compatibility with the code which uses `repo_name` field.
2667 2667
2668 2668 result = super(Repository, self).get_dict()
2669 2669 result['repo_name'] = result.pop('_repo_name', None)
2670 2670 return result
2671 2671
2672 2672
class RepoGroup(Base, BaseModel):
    """
    A (possibly nested) group of repositories. Nesting is modeled via the
    self-referencing `group_parent_id` column; the full group name encodes
    the path using `URL_SEP`.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True)  # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration', cascade="all, delete-orphan")

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id')

    def __init__(self, group_name='', parent_group=None):
        self.group_name = group_name
        self.parent_group = parent_group

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.group_id, self.group_name)

    @hybrid_property
    def group_name(self):
        # full (path-like) name of the group, backed by the `group_name` column
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        # keep the lookup hash in sync whenever the name changes
        self._group_name = value
        self.group_name_hash = self.hash_repo_group_name(value)

    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """
        Decode the raw JSON changeset cache; fall back to an EmptyCommit
        representation when the raw value is missing or cannot be parsed.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # json round-trip produces a plain, fully-serializable dict copy
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            # NOTE(review): in the error branches `dummy` is returned
            # without `source_repo_id` set — confirm this is intended
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy

    @hybrid_property
    def changeset_cache(self):
        # repo groups have no repo_id, hence the empty-string placeholder
        return self._load_changeset_cache('', self._changeset_cache)

    @changeset_cache.setter
    def changeset_cache(self, val):
        # best effort: serialization failures are logged, not raised
        try:
            self._changeset_cache = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        if self.group_id and val:
            assert val != self.group_id

        return val

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe for rendering in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.group_description)

    @classmethod
    def hash_repo_group_name(cls, repo_group_name):
        """
        Normalize a group name into an ascii-letters-only hash: any
        non-letter character is replaced by its ordinal value.
        """
        val = remove_formatting(repo_group_name)
        val = safe_str(val).lower()
        chars = []
        for c in val:
            if c not in string.ascii_letters:
                c = str(ord(c))
            chars.append(c)

        return ''.join(chars)

    @classmethod
    def _generate_choice(cls, repo_group):
        # build a single (id, label) select2 choice for the given group
        from webhelpers2.html import literal as _literal
        _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
        return repo_group.group_id, _name(repo_group.full_path_splitted)

    @classmethod
    def groups_choices(cls, groups=None, show_empty_group=True):
        """
        Return sorted (id, label) choices for group select widgets,
        optionally prefixed with a '-- No parent --' entry.
        """
        if not groups:
            groups = cls.query().all()

        repo_groups = []
        if show_empty_group:
            repo_groups = [(-1, u'-- %s --' % _('No parent'))]

        repo_groups.extend([cls._generate_choice(x) for x in groups])

        # sort by the first path segment of the label
        repo_groups = sorted(
            repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
        return repo_groups

    @classmethod
    def url_sep(cls):
        # separator used in full group paths
        return URL_SEP

    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """
        Fetch a group by its full name, optionally case-insensitively
        and/or through the short SQL cache.
        """
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()

    @classmethod
    def get_user_personal_repo_group(cls, user_id):
        """
        Return the oldest personal repo group owned by `user_id`,
        or None for the default (anonymous) user.
        """
        user = User.get(user_id)
        if user.username == User.DEFAULT_USER:
            return None

        return cls.query()\
            .filter(cls.personal == true()) \
            .filter(cls.user == user) \
            .order_by(cls.group_id.asc()) \
            .first()

    @classmethod
    def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
                            case_insensitive=True):
        """
        List repo groups, optionally filtered by owner and/or parent group.
        `Optional` sentinels distinguish 'not given' from explicit None.
        """
        q = RepoGroup.query()

        if not isinstance(user_id, Optional):
            q = q.filter(RepoGroup.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(RepoGroup.group_parent_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(RepoGroup.group_name))
        else:
            q = q.order_by(RepoGroup.group_name)
        return q.all()

    @property
    def parents(self, parents_recursion_limit=10):
        # NOTE(review): since this is a property, `parents_recursion_limit`
        # can never be passed by callers — the default of 10 always applies.
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinit loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups

    @property
    def last_commit_cache_update_diff(self):
        # seconds elapsed since the changeset cache was last refreshed
        return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0)

    @classmethod
    def _load_commit_change(cls, last_commit_cache):
        # parse the cached 'date' field; epoch-zero on any parse failure
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)
        date_latest = last_commit_cache.get('date', empty_date)
        try:
            return parse_datetime(date_latest)
        except Exception:
            return empty_date

    @property
    def last_commit_change(self):
        # datetime of the newest cached commit in this group
        return self._load_commit_change(self.changeset_cache)

    @property
    def last_db_change(self):
        return self.updated_on

    @property
    def children(self):
        # query of direct (first-level) child groups
        return RepoGroup.query().filter(RepoGroup.parent_group == self)

    @property
    def name(self):
        # last path segment of the full group name
        return self.group_name.split(RepoGroup.url_sep())[-1]

    @property
    def full_path(self):
        return self.group_name

    @property
    def full_path_splitted(self):
        # full group name split into its path segments
        return self.group_name.split(RepoGroup.url_sep())

    @property
    def repositories(self):
        # query of repositories directly inside this group (non-recursive)
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)

    @property
    def repositories_recursive_count(self):
        # total number of repositories in this group and all descendants
        cnt = self.repositories.count()

        def children_count(group):
            cnt = 0
            for child in group.children:
                cnt += child.repositories.count()
                cnt += children_count(child)
            return cnt

        return cnt + children_count(self)

    def _recursive_objects(self, include_repos=True, include_groups=True):
        """
        Depth-first collection of this group's descendants; the group
        itself is included only when `include_groups` is True.
        """
        all_ = []

        def _get_members(root_gr):
            if include_repos:
                for r in root_gr.repositories:
                    all_.append(r)
            childs = root_gr.children.all()
            if childs:
                for gr in childs:
                    if include_groups:
                        all_.append(gr)
                    _get_members(gr)

        root_group = []
        if include_groups:
            root_group = [self]

        _get_members(self)
        return root_group + all_

    def recursive_groups_and_repos(self):
        """
        Recursive return all groups, with repositories in those groups
        """
        return self._recursive_objects()

    def recursive_groups(self):
        """
        Returns all children groups for this group including children of children
        """
        return self._recursive_objects(include_repos=False)

    def recursive_repos(self):
        """
        Returns all children repositories for this group
        """
        return self._recursive_objects(include_groups=False)

    def get_new_name(self, group_name):
        """
        returns new full group name based on parent and new name

        :param group_name:
        """
        path_prefix = (self.parent_group.full_path_splitted if
                       self.parent_group else [])
        return RepoGroup.url_sep().join(path_prefix + [group_name])

    def update_commit_cache(self, config=None):
        """
        Update cache of last commit for newest repository inside this repository group.
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        def repo_groups_and_repos(root_gr):
            # NOTE(review): yields direct repositories and *immediate*
            # child groups only; deeper levels are represented through the
            # child groups' own cached values — confirm this is intended
            for _repo in root_gr.repositories:
                yield _repo
            for child_group in root_gr.children.all():
                yield child_group

        # find the object (repo or child group) with the newest cached commit
        latest_repo_cs_cache = {}
        for obj in repo_groups_and_repos(self):
            repo_cs_cache = obj.changeset_cache
            date_latest = latest_repo_cs_cache.get('date', empty_date)
            date_current = repo_cs_cache.get('date', empty_date)
            current_timestamp = datetime_to_time(parse_datetime(date_latest))
            if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                latest_repo_cs_cache = repo_cs_cache
                if hasattr(obj, 'repo_id'):
                    latest_repo_cs_cache['source_repo_id'] = obj.repo_id
                else:
                    latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')

        _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)

        latest_repo_cs_cache['updated_on'] = time.time()
        self.changeset_cache = latest_repo_cs_cache
        self.updated_on = _date_latest
        Session().add(self)
        Session().commit()

        log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
                  self.group_name, latest_repo_cs_cache, _date_latest)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for repository groups

        :param with_admins: include super-admin rows
        :param with_owner: include the owner row
        :param expand_from_user_groups: also expand user-group members
        :return: list of AttributeDict permission rows
        """
        _admin_perm = 'group.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                if usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserRepoGroupToPerm.group),
                      joinedload(UserRepoGroupToPerm.user),
                      joinedload(UserRepoGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """
        Return user-group permission rows for this repo group, sorted;
        member dicts are attached when `with_members` is True.
        """
        q = UserGroupRepoGroupToPerm.query()\
            .filter(UserGroupRepoGroupToPerm.group == self)
        q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
                      joinedload(UserGroupRepoGroupToPerm.users_group),
                      joinedload(UserGroupRepoGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def get_api_data(self):
        """
        Common function for generating api data

        """
        group = self
        data = {
            'group_id': group.group_id,
            'group_name': group.group_name,
            'group_description': group.description_safe,
            'parent_group': group.parent_group.group_name if group.parent_group else None,
            'repositories': [x.repo_name for x in group.repositories],
            'owner': group.user.username,
        }
        return data

    def get_dict(self):
        # Since we transformed `group_name` to a hybrid property, we need to
        # keep compatibility with the code which uses `group_name` field.
        result = super(RepoGroup, self).get_dict()
        result['group_name'] = result.pop('_group_name', None)
        return result
3120 3120
3121 3121
3122 3122 class Permission(Base, BaseModel):
3123 3123 __tablename__ = 'permissions'
3124 3124 __table_args__ = (
3125 3125 Index('p_perm_name_idx', 'permission_name'),
3126 3126 base_table_args,
3127 3127 )
3128 3128
    # canonical list of (permission_name, translated description) pairs,
    # inserted into the permissions table on system setup
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('branch.none', _('Branch no permissions')),
        ('branch.merge', _('Branch access by web merge')),
        ('branch.push', _('Branch access by push')),
        ('branch.push_force', _('Branch access by push with force')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user, created on
    # system setup
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
3241 3241
3242 3242 def __unicode__(self):
3243 3243 return u"<%s('%s:%s')>" % (
3244 3244 self.__class__.__name__, self.permission_id, self.permission_name
3245 3245 )
3246 3246
3247 3247 @classmethod
3248 3248 def get_by_key(cls, key):
3249 3249 return cls.query().filter(cls.permission_name == key).scalar()
3250 3250
3251 3251 @classmethod
3252 3252 def get_default_repo_perms(cls, user_id, repo_id=None):
3253 3253 q = Session().query(UserRepoToPerm, Repository, Permission)\
3254 3254 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
3255 3255 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
3256 3256 .filter(UserRepoToPerm.user_id == user_id)
3257 3257 if repo_id:
3258 3258 q = q.filter(UserRepoToPerm.repository_id == repo_id)
3259 3259 return q.all()
3260 3260
    @classmethod
    def get_default_repo_branch_perms(cls, user_id, repo_id=None):
        """
        Branch permission rules tied to the user's direct repo permissions,
        ordered by `rule_order`; optionally restricted to one repository.
        """
        q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \
            .join(
                Permission,
                UserToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserRepoToPerm,
                UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \
            .filter(UserRepoToPerm.user_id == user_id)

        if repo_id:
            q = q.filter(UserToRepoBranchPermission.repository_id == repo_id)
        return q.order_by(UserToRepoBranchPermission.rule_order).all()
3275 3275
    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Repository permission rows granted to `user_id` via user-group
        membership; only *active* user groups are considered. Optionally
        restricted to a single repository.
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()
3299 3299
    @classmethod
    def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None):
        # Branch permission rules `user_id` inherits through membership in
        # *active* user groups; mirrors get_default_repo_branch_perms but for
        # the user-group-based rule table.
        q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \
            .join(
                Permission,
                UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \
            .join(
                UserGroupRepoToPerm,
                UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())

        if repo_id:
            q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id)
        # rule_order determines the evaluation priority of branch rules
        return q.order_by(UserGroupToRepoBranchPermission.rule_order).all()
3322 3322
    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        # Repository *group* permissions granted directly to `user_id`,
        # returned as (UserRepoGroupToPerm, RepoGroup, Permission) tuples.
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserRepoGroupToPerm.permission_id == Permission.permission_id)\
            .join(
                RepoGroup,
                UserRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()
3336 3336
    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        # Repository *group* permissions `user_id` inherits through membership
        # in *active* user groups, returned as
        # (UserGroupRepoGroupToPerm, RepoGroup, Permission) tuples.
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()
3362 3362
    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        # User-group permissions granted directly to `user_id`, returned as
        # (UserUserGroupToPerm, UserGroup, Permission) tuples.
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()
3372 3372
    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        # Permissions on a *target* user group that `user_id` inherits through
        # membership in another (active) user group. The alias separates the
        # group the permission applies to from the group granting it.
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
3402 3402
3403 3403
class UserRepoToPerm(Base, BaseModel):
    """Permission granted directly to a single user on a repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    # branch rules attached to this entry; delete-orphan removes them together
    # with the parent permission row
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined')

    @classmethod
    def create(cls, user, repository, permission):
        """Create a new entry and add it to the session (no commit)."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
3433 3433
3434 3434
class UserUserGroupToPerm(Base, BaseModel):
    """Permission granted directly to a single user on a user group."""
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Create a new entry and add it to the session (no commit)."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
3462 3462
3463 3463
class UserToPerm(Base, BaseModel):
    """Global (non-object-scoped) permission assigned to a user."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly loaded: the permission is read whenever this row is used
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
3480 3480
3481 3481
class UserGroupRepoToPerm(Base, BaseModel):
    """Permission granted to a user group on a repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')
    # branch rules attached to this user-group permission entry
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Create a new entry and add it to the session (no commit)."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
3510 3510
3511 3511
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Permission granted by one user group (``user_group``) on another
    (``target_user_group``).
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # a group cannot grant permissions on itself
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # both FKs point at users_groups, so explicit primaryjoins are required
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Create a new entry and add it to the session (no commit)."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3540 3540
3541 3541
class UserGroupToPerm(Base, BaseModel):
    """Global (non-object-scoped) permission assigned to a user group."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3555 3555
3556 3556
class UserRepoGroupToPerm(Base, BaseModel):
    """Permission granted directly to a single user on a repository group."""
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Create a new entry and add it to the session (no commit)."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3581 3581
3582 3582
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """Permission granted to a user group on a repository group."""
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        # NOTE(review): unlike the sibling *_to_perm tables, this constraint
        # omits permission_id — presumably one permission per
        # (user group, repo group) pair; confirm before relying on it.
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Create a new entry and add it to the session (no commit)."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3610 3610
3611 3611
class Statistics(Base, BaseModel):
    """Pre-computed commit statistics cached per repository (one row each)."""
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # last revision the stats were computed up to
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True)
3626 3626
3627 3627
class UserFollowing(Base, BaseModel):
    """
    Record of a user following either a repository or another user
    (exactly one of the ``follows_*`` columns is expected to be set).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # two FKs into users: the follower...
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
    # ...and the followed user
    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query (not a list) of followings for the given repo."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3650 3650
3651 3651
class CacheKey(Base, BaseModel):
    """
    Registry of cache keys used for process/thread aware cache invalidation.
    Entries are marked inactive (or deleted) to force consumers to rebuild
    their cached data; ``cache_state_uid`` changes on each invalidation.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = False
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # str.partition around cache_args -> (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """
        Return a new state uid: deterministic (uuid5 of ``based_on``) when a
        seed is given, random (uuid4) otherwise.
        """
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                # deactivate and rotate the state uid so readers rebuild
                qry.update({"cache_active": False,
                            "cache_state_uid": cls.generate_new_state_uid()})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            # best-effort: log and roll back rather than propagate
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for ``cache_key``, or None if missing."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Map cache_key -> CacheKey for all entries in ``namespace``."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
3759 3759
3760 3760
class ChangesetComment(Base, BaseModel):
    """
    Comment attached to a commit (``revision``) or a pull request. It is an
    inline comment when both ``line_no`` and ``f_path`` are set, otherwise a
    general one.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    # immutable_state values: immutable comments cannot be edited/removed
    OP_IMMUTABLE = u'immutable'
    OP_CHANGEABLE = u'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
    draft = Column('draft', Boolean(), nullable=True, default=False)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    # self-referential link: a TODO comment may be resolved by other comments
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='select')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select')
    pull_request = relationship('PullRequest', lazy='select')
    pull_request_version = relationship('PullRequestVersion', lazy='select')
    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision:
        """
        q = Session().query(User)\
                .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions=None, num_versions=None):
        # Return the 1-based position of `pr_version` in the version list, or
        # None if not found.

        if versions is not None:
            num_versions = [x.pull_request_version_id for x in versions]

        num_versions = num_versions or []
        try:
            return num_versions.index(pr_version) + 1
        except (IndexError, ValueError):
            # list.index raises ValueError; IndexError is kept defensively
            return

    @property
    def outdated(self):
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def outdated_js(self):
        # JSON-encoded boolean for direct embedding into templates/JS
        return json.dumps(self.display_state == self.COMMENT_OUTDATED)

    @property
    def immutable(self):
        return self.immutable_state == self.OP_IMMUTABLE

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        def version_check():
            return self.pull_request_version_id and self.pull_request_version_id != version

        if self.is_inline:
            return self.outdated and version_check()
        else:
            # general comments don't have .outdated set, also latest don't have a version
            return version_check()

    def outdated_at_version_js(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return json.dumps(self.outdated_at_version(version))

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # NOTE(review): with version=None this is True whenever the
            # comment has any version set — presumably intended; confirm.
            return self.pull_request_version != version

        return self.pull_request_version < version

    def older_than_version_js(self, version):
        """
        Checks if comment is made from previous version than given
        """
        return json.dumps(self.older_than_version(version))

    @property
    def commit_id(self):
        """New style naming to stop using .revision"""
        return self.revision

    @property
    def resolved(self):
        # first resolving comment, or None when unresolved
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments are anchored to a file and a line
        if self.line_no and self.f_path:
            return True
        return False

    @property
    def last_version(self):
        # highest edit-history version, 0 when never edited
        version = 0
        if self.history:
            version = self.history[-1].version
        return version

    def get_index_version(self, versions):
        """1-based index of this comment's PR version within ``versions``."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    @property
    def review_status(self):
        if self.status_change:
            return self.status_change[0].status

    @property
    def review_status_lbl(self):
        if self.status_change:
            return self.status_change[0].status_lbl

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Data exposed over the JSON-RPC API for this comment."""
        comment = self

        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on,
            'comment_resolved_by': self.resolved,
            'comment_commit_id': comment.revision,
            'comment_pull_request_id': comment.pull_request_id,
            'comment_last_version': self.last_version
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3950 3950
3951 3951
class ChangesetCommentHistory(Base, BaseModel):
    """
    Stores previous versions of an edited ChangesetComment, making comment
    edits auditable.
    """
    __tablename__ = 'changeset_comments_history'
    __table_args__ = (
        Index('cch_comment_id_idx', 'comment_id'),
        base_table_args,
    )

    comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
    comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
    version = Column("version", Integer(), nullable=False, default=0)
    created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    deleted = Column('deleted', Boolean(), default=False)

    author = relationship('User', lazy='joined')
    comment = relationship('ChangesetComment', cascade="all, delete")

    @classmethod
    def get_version(cls, comment_id):
        """
        Return the next history version number for ``comment_id``.

        Equivalent to ``max(entry_count, latest_version) + 1``, or 1 when no
        history exists yet. The previous implementation called ``q.count()``
        repeatedly and indexed the query twice, issuing up to three separate
        SQL round trips; this fetches each value once.
        """
        q = Session().query(ChangesetCommentHistory).filter(
            ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
        entry_count = q.count()
        if entry_count == 0:
            return 1
        # q is ordered by version DESC, so q[0] is the latest history entry
        latest_version = q[0].version
        return max(entry_count, latest_version) + 1
3980 3980
3981 3981
class ChangesetStatus(Base, BaseModel):
    """
    Review status (approved/rejected/...) set on a commit or pull request,
    optionally linked to the comment that set it. Statuses are versioned,
    unique per (repo, revision, version).
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # NOTE: removed a stray `CheckConstraint,` expression statement that sat
    # here — dead code, likely a merge artifact.
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='select')
    repo = relationship('Repository', lazy='select')
    comment = relationship('ChangesetComment', lazy='select')
    pull_request = relationship('PullRequest', lazy='select')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Translate a raw status value into its human readable label."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Data exposed over the JSON-RPC API for this status."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
4044 4044
4045 4045
4046 4046 class _SetState(object):
4047 4047 """
4048 4048 Context processor allowing changing state for sensitive operation such as
4049 4049 pull request update or merge
4050 4050 """
4051 4051
4052 4052 def __init__(self, pull_request, pr_state, back_state=None):
4053 4053 self._pr = pull_request
4054 4054 self._org_state = back_state or pull_request.pull_request_state
4055 4055 self._pr_state = pr_state
4056 4056 self._current_state = None
4057 4057
4058 4058 def __enter__(self):
4059 4059 log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
4060 4060 self._pr, self._pr_state)
4061 4061 self.set_pr_state(self._pr_state)
4062 4062 return self
4063 4063
4064 4064 def __exit__(self, exc_type, exc_val, exc_tb):
4065 4065 if exc_val is not None:
4066 4066 log.error(traceback.format_exc(exc_tb))
4067 4067 return None
4068 4068
4069 4069 self.set_pr_state(self._org_state)
4070 4070 log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
4071 4071 self._pr, self._org_state)
4072 4072
4073 4073 @property
4074 4074 def state(self):
4075 4075 return self._current_state
4076 4076
4077 4077 def set_pr_state(self, pr_state):
4078 4078 try:
4079 4079 self._pr.pull_request_state = pr_state
4080 4080 Session().add(self._pr)
4081 4081 Session().commit()
4082 4082 self._current_state = pr_state
4083 4083 except Exception:
4084 4084 log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
4085 4085 raise
4086 4086
4087 4087
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Shared declarative mixin for :class:`PullRequest` (the live object) and
    :class:`PullRequestVersion` (immutable snapshots).
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    # available states
    STATE_CREATING = u'creating'
    STATE_UPDATING = u'updating'
    STATE_MERGING = u'merging'
    STATE_CREATED = u'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    # transient STATE_* value used to lock sensitive operations; see _SetState
    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        # author of the pull request
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    # stored as one ':'-joined string of commit ids; see `revisions` property
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    # refs are serialized as 'type:name:commit_id' strings (X:Y:Z)
    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        # validate the serialized 'X:Y:Z' format before storing
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_unicode(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        # same serialized format as source_ref
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_unicode(val)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    last_merge_metadata = Column(
        'last_merge_metadata', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        # JSON-serialized form of the reviewer_data column
        return json.dumps(self.reviewer_data)

    @property
    def last_merge_metadata_parsed(self):
        # De-coerce the mutable JSON column back into plain python objects,
        # rebuilding stored refs as Reference tuples.
        metadata = {}
        if not self.last_merge_metadata:
            return metadata

        if hasattr(self.last_merge_metadata, 'de_coerce'):
            for k, v in self.last_merge_metadata.de_coerce().items():
                if k in ['target_ref', 'source_ref']:
                    metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
                else:
                    if hasattr(v, 'de_coerce'):
                        metadata[k] = v.de_coerce()
                    else:
                        metadata[k] = v
        return metadata

    @property
    def work_in_progress(self):
        """checks if pull request is work in progress by checking the title"""
        # NOTE(review): assumes self.title is not None even though the column
        # is nullable -- confirm callers always set a title
        title = self.title.upper()
        if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
            return True
        return False

    @property
    def title_safe(self):
        # escape curly braces so the title is safe to pass through str.format
        return self.title\
            .replace('{', '{{')\
            .replace('}', '}}')

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe rendering in templates
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # list of commit ids, deserialized from the ':'-joined column
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = u':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        # Reference tuple parsed from the serialized source ref
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        # Reference tuple parsed from the serialized target ref
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        # thin wrapper around the module-level helper
        return unicode_to_reference(raw)

    @staticmethod
    def reference_to_unicode(ref):
        # thin wrapper around the module-level helper
        return reference_to_unicode(ref)

    def get_api_data(self, with_merge_state=True):
        """
        Common function for generating API data of a pull request.

        :param with_merge_state: when True also compute the mergeability
            state via PullRequestModel; otherwise report 'not_available'
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_response, merge_status, msg = \
                PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status,
                'message': safe_unicode(msg),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data

    def set_state(self, pull_request_state, final_state=None):
        """
        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

        :param pull_request_state: state to hold while inside the context
        :param final_state: optional state to restore on exit instead of the
            current one

        """

        return _SetState(self, pull_request_state, back_state=final_state)
4372 4372
4373 4373
class PullRequest(Base, _PullRequestBase):
    """Live pull request object; snapshots live in PullRequestVersion."""
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )
    # sentinel used to address the newest version of a PR
    LATEST_VER = 'latest'

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan")
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan")
    comments = relationship('ChangesetComment', cascade="all, delete-orphan")
    # lazy='dynamic' so `versions` is a query (supports .count()/.order_by())
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        # Build a read-only wrapper of `pull_request_obj` (usually a version)
        # mixing in some data taken from `org_pull_request_obj` (the live PR).

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # NOTE(review): only invoked when normal lookup fails; names in
                # `internal` that are real methods (e.g. 'versions') never reach
                # here, and an internal name NOT defined on the class would
                # recurse -- confirm internal_methods only lists real methods
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                # None when wrapping a live PullRequest instead of a version
                return getattr(pull_request_obj, 'pull_request_version_id', None)

            @property
            def pull_request_last_version(self):
                return pull_request_obj.pull_request_last_version

        # base attributes from the API payload (without expensive merge state)
        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions
        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
        # these come from the original (live) pull request object
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # True while the PR is in any transitional state (not yet 'created')
        return self.pull_request_state != PullRequest.STATE_CREATED

    def __json__(self):
        return {
            'revisions': self.revisions,
            'versions': self.versions_count
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    def get_pull_request_reviewers(self, role=None):
        # all reviewer rows of this PR, optionally narrowed to one role
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()

    @property
    def reviewers_count(self):
        # number of reviewer-role entries
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
        return qry.count()

    @property
    def observers_count(self):
        # number of observer-role entries
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
        return qry.count()

    def observers(self):
        # yields (reviewer_entry, user) pairs for observer-role entries
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
            .all()

        for entry in qry:
            yield entry, entry.user

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        # vcs instance of the shadow merge repo, or None (implicitly) when the
        # shadow workspace directory does not exist on disk
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)

    @property
    def versions_count(self):
        """
        return number of versions this PR have, e.g a PR that once been
        updated will have 2 versions
        """
        return self.versions.count() + 1

    @property
    def pull_request_last_version(self):
        return self.versions_count
4540 4540
4541 4541
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request, created each time its source is
    updated. Review-related accessors delegate to the live ``PullRequest``
    this version belongs to.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        # was defined twice (identical merge-artifact duplicate); kept once
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()

    def observers(self):
        return self.pull_request.observers()
4587 4587
4588 4588
class PullRequestReviewers(Base, BaseModel):
    """Association of a user with a pull request, as reviewer or observer."""
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )
    ROLE_REVIEWER = u'reviewer'
    ROLE_OBSERVER = u'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    @hybrid_property
    def reasons(self):
        # reasons why this user was added as reviewer; normalized to a list
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        if any(not isinstance(x, compat.string_types) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON list persisted in the 'reason' column
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)

    user = relationship('User')
    pull_request = relationship('PullRequest')

    # JSON data generated from reviewer rules (e.g. voting rules)
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer
        """
        # NOTE(review): implicitly returns None when rule_data has no
        # 'vote_rule' entry
        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    @classmethod
    def get_pull_request_reviewers(cls, pull_request_id, role=None):
        # all reviewer rows for the given PR id, optionally filtered by role
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
4660 4660
4661 4661
class Notification(Base, BaseModel):
    """A notification, fanned out to recipients via UserNotification rows."""
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = u'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete-orphan")

    @property
    def recipients(self):
        # users linked to this notification, ordered by user id
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to every recipient.

        :param created_by: user object that authored the notification
        :param subject: short subject line
        :param body: notification body text
        :param recipients: iterable of user objects to notify
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE

        Note: adds rows to the session but does not commit.
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
4721 4721
4722 4722
class UserNotification(Base, BaseModel):
    """Link between a Notification and one recipient, with read state."""
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        # flips the read flag; adds to session but does not commit
        self.read = True
        Session().add(self)
4742 4742
4743 4743
class UserNotice(Base, BaseModel):
    """Per-user dismissible notice (e.g. admin messages) shown in the UI."""
    __tablename__ = 'user_notices'
    __table_args__ = (
        base_table_args
    )

    NOTIFICATION_TYPE_MESSAGE = 'message'
    NOTIFICATION_TYPE_NOTICE = 'notice'

    NOTIFICATION_LEVEL_INFO = 'info'
    NOTIFICATION_LEVEL_WARNING = 'warning'
    NOTIFICATION_LEVEL_ERROR = 'error'

    # NOTE(review): DB column is named 'gist_id' -- looks like a copy-paste
    # from Gist; kept as-is since renaming requires a schema migration
    user_notice_id = Column('gist_id', Integer(), primary_key=True)

    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)

    notice_read = Column('notice_read', Boolean, default=False)

    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)

    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')

    @classmethod
    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
        """
        Create and commit a notice for the given user.

        Silently does nothing when notice_level is not one of the known
        NOTIFICATION_LEVEL_* values, or when an identical unread notice
        already exists and allow_duplicate is False.
        """
        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
                                cls.NOTIFICATION_LEVEL_WARNING,
                                cls.NOTIFICATION_LEVEL_INFO]:
            return

        from rhodecode.model.user import UserModel
        user = UserModel().get_user(user)

        new_notice = UserNotice()
        if not allow_duplicate:
            # duplicate == same user, same body, still unread
            existing_msg = UserNotice().query() \
                .filter(UserNotice.user == user) \
                .filter(UserNotice.notice_body == body) \
                .filter(UserNotice.notice_read == false()) \
                .scalar()
            if existing_msg:
                log.warning('Ignoring duplicate notice for user %s', user)
                return

        new_notice.user = user
        new_notice.notice_subject = subject
        new_notice.notice_body = body
        new_notice.notification_level = notice_level
        Session().add(new_notice)
        Session().commit()
4801 4801
4802 4802
class Gist(Base, BaseModel):
    """Code snippet (gist), backed by a small VCS repository on disk."""
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    # access-control levels
    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    # public identifier used in URLs; see get_or_404 / get_by_access_id
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    # expiration stored as a float value; exact semantics handled by GistModel
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')
4829 4829
4830 4830 def __repr__(self):
4831 4831 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
4832 4832
4833 4833 @hybrid_property
4834 4834 def description_safe(self):
4835 4835 from rhodecode.lib import helpers as h
4836 4836 return h.escape(self.gist_description)
4837 4837
4838 4838 @classmethod
4839 4839 def get_or_404(cls, id_):
4840 4840 from pyramid.httpexceptions import HTTPNotFound
4841 4841
4842 4842 res = cls.query().filter(cls.gist_access_id == id_).scalar()
4843 4843 if not res:
4844 4844 log.debug('WARN: No DB entry with id %s', id_)
4845 4845 raise HTTPNotFound()
4846 4846 return res
4847 4847
4848 4848 @classmethod
4849 4849 def get_by_access_id(cls, gist_access_id):
4850 4850 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
4851 4851
4852 4852 def gist_url(self):
4853 4853 from rhodecode.model.gist import GistModel
4854 4854 return GistModel().get_url(self)
4855 4855
4856 4856 @classmethod
4857 4857 def base_path(cls):
4858 4858 """
4859 4859 Returns base path when all gists are stored
4860 4860
4861 4861 :param cls:
4862 4862 """
4863 4863 from rhodecode.model.gist import GIST_STORE_LOC
4864 4864 q = Session().query(RhodeCodeUi)\
4865 4865 .filter(RhodeCodeUi.ui_key == URL_SEP)
4866 4866 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
4867 4867 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
4868 4868
4869 4869 def get_api_data(self):
4870 4870 """
4871 4871 Common function for generating gist related data for API
4872 4872 """
4873 4873 gist = self
4874 4874 data = {
4875 4875 'gist_id': gist.gist_id,
4876 4876 'type': gist.gist_type,
4877 4877 'access_id': gist.gist_access_id,
4878 4878 'description': gist.gist_description,
4879 4879 'url': gist.gist_url(),
4880 4880 'expires': gist.gist_expires,
4881 4881 'created_on': gist.created_on,
4882 4882 'modified_at': gist.modified_at,
4883 4883 'content': None,
4884 4884 'acl_level': gist.acl_level,
4885 4885 }
4886 4886 return data
4887 4887
4888 4888 def __json__(self):
4889 4889 data = dict(
4890 4890 )
4891 4891 data.update(self.get_api_data())
4892 4892 return data
4893 4893 # SCM functions
4894 4894
4895 4895 def scm_instance(self, **kwargs):
4896 4896 """
4897 4897 Get an instance of VCS Repository
4898 4898
4899 4899 :param kwargs:
4900 4900 """
4901 4901 from rhodecode.model.gist import GistModel
4902 4902 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
4903 4903 return get_vcs_instance(
4904 4904 repo_path=safe_str(full_repo_path), create=False,
4905 4905 _vcs_alias=GistModel.vcs_backend)
4906 4906
4907 4907
class ExternalIdentity(Base, BaseModel):
    """
    Maps an account from an external authentication provider to a local
    user, together with the provider's access tokens.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    # composite primary key: (external_id, local_user_id, provider_name)
    external_id = Column('external_id', Unicode(255), default=u'', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'', primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :param local_user_id: optional narrowing to a specific local user
        :return: ExternalIdentity or None
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User or None
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        # implicit join of User against this table via the filter below
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: query of ExternalIdentity rows (not yet executed)
        """
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        # resolve the auth plugin instance for a given provider id;
        # plugin ids are namespaced under the EE egg entry point
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
4973 4973
4974 4974
class Integration(Base, BaseModel):
    """
    An integration (webhook, Slack, Jira, ...) configured globally, for a
    single repository, or for a repository group.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # when scoped to a repo group (or globally), apply only to direct
    # children instead of recursing into sub-groups
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    # per-integration settings stored as mutable JSON
    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        # human-readable description of where this integration applies;
        # precedence: repo > repo group > root-repos-only > global
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
5016 5016
5017 5017
class RepoReviewRuleUser(Base, BaseModel):
    """
    A single user attached to a repository review rule, carrying a role
    (reviewer/observer) and a `mandatory` flag.
    """
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )
    ROLE_REVIEWER = u'reviewer'
    ROLE_OBSERVER = u'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    user = relationship('User')

    def rule_data(self):
        # serializable view of the per-user rule settings
        return {
            'mandatory': self.mandatory,
            'role': self.role,
        }
5039 5039
5040 5040
class RepoReviewRuleUserGroup(Base, BaseModel):
    """
    A user group attached to a repository review rule, carrying a role,
    a `mandatory` flag and a minimum-vote rule.
    """
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel meaning "every member of the group must vote"
    VOTE_RULE_ALL = -1
    ROLE_REVIEWER = u'reviewer'
    ROLE_OBSERVER = u'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        # serializable view of the per-group rule settings
        return {
            'mandatory': self.mandatory,
            'role': self.role,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        # human-readable label; None/0 is treated the same as VOTE_RULE_ALL
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
5073 5073
5074 5074
class RepoReviewRule(Base, BaseModel):
    """
    A per-repository review rule: when a pull request's source/target
    branches and changed files match the rule's glob (or `re:`-prefixed
    regex) patterns, the configured users and user groups are applied as
    reviewers/observers.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)

    # Legacy fields, just for backward compat
    _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)

    pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
    commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)

    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # raises re.error when the glob cannot compile to a valid regex
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        # empty/None pattern means "match everything"
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    @hybrid_property
    def forbid_pr_author_to_review(self):
        return self.pr_author == 'forbid_pr_author'

    @hybrid_property
    def include_pr_author_to_review(self):
        return self.pr_author == 'include_pr_author'

    @hybrid_property
    def forbid_commit_author_to_review(self):
        return self.commit_author == 'forbid_commit_author'

    @hybrid_property
    def include_commit_author_to_review(self):
        return self.commit_author == 'include_commit_author'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    # explicit regex pattern, used verbatim
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for file_data in files_changed:
                filename = file_data.get('filename')

                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # `users` is keyed by username; the previous check compared
                # the User object against the username keys, so it never
                # matched and a duplicate rule row would overwrite an
                # earlier one. Compare usernames so the first entry wins,
                # consistent with the user-group handling below.
                if rule_user.user.username not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self, user_id):
        """
        Return the user-group rule rows whose group contains `user_id`,
        or an empty list when no group rules exist or none match.
        """
        rules = []
        if not self.rule_user_groups:
            return rules

        for user_group in self.rule_user_groups:
            user_group_members = [x.user_id for x in user_group.users_group.members]
            if user_id in user_group_members:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
5260 5260
5261 5261
class ScheduleEntry(Base, BaseModel):
    """
    A Celery-beat schedule entry stored in the database: the schedule
    definition (crontab/timedelta/integer) plus the task it runs
    (dotted path, args, kwargs) and bookkeeping columns.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # allowed values for `schedule_type`
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            # FIX: format args were previously swapped (and the getter was
            # read instead of the rejected value), producing a misleading
            # message like "Value must be on of `<bad value>` and got `None`".
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Compute a stable sha1 uid for a schedule entry from its task
        dotted-name, args and kwargs (raw JSON values are decoded first).
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): sorted() over a string sorts its *characters*; this
        # is deterministic but looks accidental -- confirm intent before
        # changing, since existing task_uid values depend on it.
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        # dotted path of the task callable
        return self.task_dot_notation

    @property
    def schedule(self):
        # celery schedule object built from the stored definition
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val, indent=None):
        # serialize a (possibly mutation-tracked) JSON value back to text
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            val = json.dumps(val, indent=indent, sort_keys=True)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    def args_raw(self, indent=None):
        return self._as_raw(self.task_args, indent)

    def kwargs_raw(self, indent=None):
        return self._as_raw(self.task_kwargs, indent)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
5378 5378
5379 5379
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep task_uid in sync with the task definition on every update
    target.task_uid = ScheduleEntry.get_uid(target)


@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute task_uid when the entry is first inserted
    target.task_uid = ScheduleEntry.get_uid(target)
5388 5388
5389 5389
class _BaseBranchPerms(BaseModel):
    """
    Mixin shared by the branch-permission models: stores a glob branch
    pattern (plus a hash column for indexed lookups) and implements
    branch matching. Subclasses provide the `_branch_pattern` and
    `_branch_hash` columns.
    """
    @classmethod
    def compute_hash(cls, value):
        # stable hash of the pattern, persisted alongside it for lookups
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # empty/None pattern means "match everything"
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # raises re.error when the glob cannot compile to a valid regex
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # set the Hash when setting the branch pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if this the branch matches entry

        :param branch: branch name for the commit
        """
        # NOTE(review): unlike RepoReviewRule.matches, no `re:` prefix is
        # supported here -- patterns are always treated as globs.

        branch = branch or ''

        branch_matches = True
        if branch:
            branch_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
            branch_matches = bool(branch_regex.search(branch))

        return branch_matches
5428 5428
5429 5429
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Per-user branch permission rule on a repository; the pattern/matching
    behavior comes from the _BaseBranchPerms mixin.
    """
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # ties this branch rule to an existing user->repo permission entry
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm')

    # rules are evaluated in rule_order; lower numbers first
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        return u'<UserBranchPermission(%s => %r)>' % (
            self.user_repo_to_perm, self.branch_pattern)
5454 5454
5455 5455
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Per-user-group branch permission rule on a repository; the
    pattern/matching behavior comes from the _BaseBranchPerms mixin.
    """
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', backref='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # ties this branch rule to an existing user-group->repo permission entry
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm')

    # rules are evaluated in rule_order; lower numbers first
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default=u'*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __unicode__(self):
        # FIX: label previously said 'UserBranchPermission' (copy-paste from
        # the user-level class), making debug output ambiguous between the
        # two rule types.
        return u'<UserGroupBranchPermission(%s => %r)>' % (
            self.user_group_repo_to_perm, self.branch_pattern)
5480 5480
5481 5481
class UserBookmark(Base, BaseModel):
    """
    A user's bookmark shown in the UI: a positioned shortcut pointing to
    a repository, a repository group, or an arbitrary redirect URL.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        # a user may bookmark a given repo/repo-group only once, and each
        # position slot may be used only once per user
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # exactly one of these targets is expected to be set (or neither, for
    # a pure redirect-url bookmark)
    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        # returns the bookmark in a given position slot, or None
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id, cache=True):
        """
        All bookmarks of a user ordered by position, with repo/repo-group
        targets eagerly loaded; optionally served from the short SQL cache.
        """
        bookmarks = cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .options(joinedload(UserBookmark.repository)) \
            .options(joinedload(UserBookmark.repository_group)) \
            .order_by(UserBookmark.position.asc())

        if cache:
            bookmarks = bookmarks.options(
                FromCache("sql_cache_short", "get_user_{}_bookmarks".format(user_id))
            )

        return bookmarks.all()

    def __unicode__(self):
        return u'<UserBookmark(%s @ %r)>' % (self.position, self.redirect_url)
5529 5529
5530 5530
class FileStore(Base, BaseModel):
    """
    An uploaded file ("artifact") tracked in the database; the actual
    bytes live in the file-store backend, keyed by `file_uid`. The
    scope_* columns optionally restrict visibility to a user, user group,
    repository, or repository group.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    # original filename at upload time
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', BigInteger(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    # hidden defines an attachment that should be hidden from showing in artifact listing
    hidden = Column('hidden', Boolean(), nullable=False, default=False)

    # user who uploaded the file
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id')

    file_metadata = relationship('FileStoreMetadata', lazy='joined')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')
5587 5587
    @classmethod
    def get_by_store_uid(cls, file_store_uid, safe=False):
        """
        Fetch a FileStore entry by its file_uid.

        :param safe: when True use .first() (tolerates duplicate rows);
            otherwise .scalar(), which raises when multiple rows match.
        """
        if safe:
            return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
        else:
            return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()
5594 5594
    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, hidden=False, check_acl=True,
               user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
        """
        Build a new FileStore entry from upload metadata. The entry is NOT
        added to the session or committed -- the caller persists it.

        :param file_uid: backend storage key of the file
        :param filename: original filename, stored as file_org_name
        :param file_hash: sha256 of the file contents
        :param file_size: size in bytes
        :return: unsaved FileStore instance
        """
        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled
        store_entry.hidden = hidden

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id

        return store_entry
5618 5618
5619 5619 @classmethod
5620 5620 def store_metadata(cls, file_store_id, args, commit=True):
5621 5621 file_store = FileStore.get(file_store_id)
5622 5622 if file_store is None:
5623 5623 return
5624 5624
5625 5625 for section, key, value, value_type in args:
5626 5626 has_key = FileStoreMetadata().query() \
5627 5627 .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
5628 5628 .filter(FileStoreMetadata.file_store_meta_section == section) \
5629 5629 .filter(FileStoreMetadata.file_store_meta_key == key) \
5630 5630 .scalar()
5631 5631 if has_key:
5632 5632 msg = 'key `{}` already defined under section `{}` for this file.'\
5633 5633 .format(key, section)
5634 5634 raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)
5635 5635
5636 5636 # NOTE(marcink): raises ArtifactMetadataBadValueType
5637 5637 FileStoreMetadata.valid_value_type(value_type)
5638 5638
5639 5639 meta_entry = FileStoreMetadata()
5640 5640 meta_entry.file_store = file_store
5641 5641 meta_entry.file_store_meta_section = section
5642 5642 meta_entry.file_store_meta_key = key
5643 5643 meta_entry.file_store_meta_value_type = value_type
5644 5644 meta_entry.file_store_meta_value = value
5645 5645
5646 5646 Session().add(meta_entry)
5647 5647
5648 5648 try:
5649 5649 if commit:
5650 5650 Session().commit()
5651 5651 except IntegrityError:
5652 5652 Session().rollback()
5653 5653 raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')
5654 5654
5655 5655 @classmethod
5656 5656 def bump_access_counter(cls, file_uid, commit=True):
5657 5657 FileStore().query()\
5658 5658 .filter(FileStore.file_uid == file_uid)\
5659 5659 .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
5660 5660 FileStore.accessed_on: datetime.datetime.now()})
5661 5661 if commit:
5662 5662 Session().commit()
5663 5663
5664 5664 def __json__(self):
5665 5665 data = {
5666 5666 'filename': self.file_display_name,
5667 5667 'filename_org': self.file_org_name,
5668 5668 'file_uid': self.file_uid,
5669 5669 'description': self.file_description,
5670 5670 'hidden': self.hidden,
5671 5671 'size': self.file_size,
5672 5672 'created_on': self.created_on,
5673 5673 'uploaded_by': self.upload_user.get_api_data(details='basic'),
5674 5674 'downloaded_times': self.accessed_count,
5675 5675 'sha256': self.file_hash,
5676 5676 'metadata': self.file_metadata,
5677 5677 }
5678 5678
5679 5679 return data
5680 5680
5681 5681 def __repr__(self):
5682 5682 return '<FileStore({})>'.format(self.file_store_id)
5683 5683
5684 5684
5685 5685 class FileStoreMetadata(Base, BaseModel):
5686 5686 __tablename__ = 'file_store_metadata'
5687 5687 __table_args__ = (
5688 5688 UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
5689 5689 Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
5690 5690 Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
5691 5691 base_table_args
5692 5692 )
5693 5693 SETTINGS_TYPES = {
5694 5694 'str': safe_str,
5695 5695 'int': safe_int,
5696 5696 'unicode': safe_unicode,
5697 5697 'bool': str2bool,
5698 5698 'list': functools.partial(aslist, sep=',')
5699 5699 }
5700 5700
5701 5701 file_store_meta_id = Column(
5702 5702 "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
5703 5703 primary_key=True)
5704 5704 _file_store_meta_section = Column(
5705 5705 "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5706 5706 nullable=True, unique=None, default=None)
5707 5707 _file_store_meta_section_hash = Column(
5708 5708 "file_store_meta_section_hash", String(255),
5709 5709 nullable=True, unique=None, default=None)
5710 5710 _file_store_meta_key = Column(
5711 5711 "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
5712 5712 nullable=True, unique=None, default=None)
5713 5713 _file_store_meta_key_hash = Column(
5714 5714 "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
5715 5715 _file_store_meta_value = Column(
5716 5716 "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
5717 5717 nullable=True, unique=None, default=None)
5718 5718 _file_store_meta_value_type = Column(
5719 5719 "file_store_meta_value_type", String(255), nullable=True, unique=None,
5720 5720 default='unicode')
5721 5721
5722 5722 file_store_id = Column(
5723 5723 'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
5724 5724 nullable=True, unique=None, default=None)
5725 5725
5726 5726 file_store = relationship('FileStore', lazy='joined')
5727 5727
5728 5728 @classmethod
5729 5729 def valid_value_type(cls, value):
5730 5730 if value.split('.')[0] not in cls.SETTINGS_TYPES:
5731 5731 raise ArtifactMetadataBadValueType(
5732 5732 'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))
5733 5733
5734 5734 @hybrid_property
5735 5735 def file_store_meta_section(self):
5736 5736 return self._file_store_meta_section
5737 5737
5738 5738 @file_store_meta_section.setter
5739 5739 def file_store_meta_section(self, value):
5740 5740 self._file_store_meta_section = value
5741 5741 self._file_store_meta_section_hash = _hash_key(value)
5742 5742
5743 5743 @hybrid_property
5744 5744 def file_store_meta_key(self):
5745 5745 return self._file_store_meta_key
5746 5746
5747 5747 @file_store_meta_key.setter
5748 5748 def file_store_meta_key(self, value):
5749 5749 self._file_store_meta_key = value
5750 5750 self._file_store_meta_key_hash = _hash_key(value)
5751 5751
5752 5752 @hybrid_property
5753 5753 def file_store_meta_value(self):
5754 5754 val = self._file_store_meta_value
5755 5755
5756 5756 if self._file_store_meta_value_type:
5757 5757 # e.g unicode.encrypted == unicode
5758 5758 _type = self._file_store_meta_value_type.split('.')[0]
5759 5759 # decode the encrypted value if it's encrypted field type
5760 5760 if '.encrypted' in self._file_store_meta_value_type:
5761 5761 cipher = EncryptedTextValue()
5762 5762 val = safe_unicode(cipher.process_result_value(val, None))
5763 5763 # do final type conversion
5764 5764 converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
5765 5765 val = converter(val)
5766 5766
5767 5767 return val
5768 5768
5769 5769 @file_store_meta_value.setter
5770 5770 def file_store_meta_value(self, val):
5771 5771 val = safe_unicode(val)
5772 5772 # encode the encrypted value
5773 5773 if '.encrypted' in self.file_store_meta_value_type:
5774 5774 cipher = EncryptedTextValue()
5775 5775 val = safe_unicode(cipher.process_bind_param(val, None))
5776 5776 self._file_store_meta_value = val
5777 5777
5778 5778 @hybrid_property
5779 5779 def file_store_meta_value_type(self):
5780 5780 return self._file_store_meta_value_type
5781 5781
5782 5782 @file_store_meta_value_type.setter
5783 5783 def file_store_meta_value_type(self, val):
5784 5784 # e.g unicode.encrypted
5785 5785 self.valid_value_type(val)
5786 5786 self._file_store_meta_value_type = val
5787 5787
5788 5788 def __json__(self):
5789 5789 data = {
5790 5790 'artifact': self.file_store.file_uid,
5791 5791 'section': self.file_store_meta_section,
5792 5792 'key': self.file_store_meta_key,
5793 5793 'value': self.file_store_meta_value,
5794 5794 }
5795 5795
5796 5796 return data
5797 5797
5798 5798 def __repr__(self):
5799 5799 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.file_store_meta_section,
5800 5800 self.file_store_meta_key, self.file_store_meta_value)
5801 5801
5802 5802
5803 5803 class DbMigrateVersion(Base, BaseModel):
5804 5804 __tablename__ = 'db_migrate_version'
5805 5805 __table_args__ = (
5806 5806 base_table_args,
5807 5807 )
5808 5808
5809 5809 repository_id = Column('repository_id', String(250), primary_key=True)
5810 5810 repository_path = Column('repository_path', Text)
5811 5811 version = Column('version', Integer)
5812 5812
5813 5813 @classmethod
5814 5814 def set_version(cls, version):
5815 5815 """
5816 5816 Helper for forcing a different version, usually for debugging purposes via ishell.
5817 5817 """
5818 5818 ver = DbMigrateVersion.query().first()
5819 5819 ver.version = version
5820 5820 Session().commit()
5821 5821
5822 5822
5823 5823 class DbSession(Base, BaseModel):
5824 5824 __tablename__ = 'db_session'
5825 5825 __table_args__ = (
5826 5826 base_table_args,
5827 5827 )
5828 5828
5829 5829 def __repr__(self):
5830 5830 return '<DB:DbSession({})>'.format(self.id)
5831 5831
5832 5832 id = Column('id', Integer())
5833 5833 namespace = Column('namespace', String(255), primary_key=True)
5834 5834 accessed = Column('accessed', DateTime, nullable=False)
5835 5835 created = Column('created', DateTime, nullable=False)
5836 5836 data = Column('data', PickleType, nullable=False)
@@ -1,598 +1,600 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 permissions model for RhodeCode
23 23 """
24 24 import collections
25 25 import logging
26 26 import traceback
27 27
28 28 from sqlalchemy.exc import DatabaseError
29 29
30 30 from rhodecode import events
31 31 from rhodecode.model import BaseModel
32 32 from rhodecode.model.db import (
33 33 User, Permission, UserToPerm, UserRepoToPerm, UserRepoGroupToPerm,
34 34 UserUserGroupToPerm, UserGroup, UserGroupToPerm, UserToRepoBranchPermission)
35 35 from rhodecode.lib.utils2 import str2bool, safe_int
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class PermissionModel(BaseModel):
41 41 """
42 42 Permissions model for RhodeCode
43 43 """
44 FORKING_DISABLED = 'hg.fork.none'
45 FORKING_ENABLED = 'hg.fork.repository'
44 46
45 47 cls = Permission
46 48 global_perms = {
47 49 'default_repo_create': None,
48 50 # special case for create repos on write access to group
49 51 'default_repo_create_on_write': None,
50 52 'default_repo_group_create': None,
51 53 'default_user_group_create': None,
52 54 'default_fork_create': None,
53 55 'default_inherit_default_permissions': None,
54 56 'default_register': None,
55 57 'default_password_reset': None,
56 58 'default_extern_activate': None,
57 59
58 60 # object permissions below
59 61 'default_repo_perm': None,
60 62 'default_group_perm': None,
61 63 'default_user_group_perm': None,
62 64
63 65 # branch
64 66 'default_branch_perm': None,
65 67 }
66 68
67 69 def set_global_permission_choices(self, c_obj, gettext_translator):
68 70 _ = gettext_translator
69 71
70 72 c_obj.repo_perms_choices = [
71 73 ('repository.none', _('None'),),
72 74 ('repository.read', _('Read'),),
73 75 ('repository.write', _('Write'),),
74 76 ('repository.admin', _('Admin'),)]
75 77
76 78 c_obj.group_perms_choices = [
77 79 ('group.none', _('None'),),
78 80 ('group.read', _('Read'),),
79 81 ('group.write', _('Write'),),
80 82 ('group.admin', _('Admin'),)]
81 83
82 84 c_obj.user_group_perms_choices = [
83 85 ('usergroup.none', _('None'),),
84 86 ('usergroup.read', _('Read'),),
85 87 ('usergroup.write', _('Write'),),
86 88 ('usergroup.admin', _('Admin'),)]
87 89
88 90 c_obj.branch_perms_choices = [
89 91 ('branch.none', _('Protected/No Access'),),
90 92 ('branch.merge', _('Web merge'),),
91 93 ('branch.push', _('Push'),),
92 94 ('branch.push_force', _('Force Push'),)]
93 95
94 96 c_obj.register_choices = [
95 97 ('hg.register.none', _('Disabled')),
96 98 ('hg.register.manual_activate', _('Allowed with manual account activation')),
97 99 ('hg.register.auto_activate', _('Allowed with automatic account activation')),]
98 100
99 101 c_obj.password_reset_choices = [
100 102 ('hg.password_reset.enabled', _('Allow password recovery')),
101 103 ('hg.password_reset.hidden', _('Hide password recovery link')),
102 104 ('hg.password_reset.disabled', _('Disable password recovery')),]
103 105
104 106 c_obj.extern_activate_choices = [
105 107 ('hg.extern_activate.manual', _('Manual activation of external account')),
106 108 ('hg.extern_activate.auto', _('Automatic activation of external account')),]
107 109
108 110 c_obj.repo_create_choices = [
109 111 ('hg.create.none', _('Disabled')),
110 112 ('hg.create.repository', _('Enabled'))]
111 113
112 114 c_obj.repo_create_on_write_choices = [
113 115 ('hg.create.write_on_repogroup.false', _('Disabled')),
114 116 ('hg.create.write_on_repogroup.true', _('Enabled'))]
115 117
116 118 c_obj.user_group_create_choices = [
117 119 ('hg.usergroup.create.false', _('Disabled')),
118 120 ('hg.usergroup.create.true', _('Enabled'))]
119 121
120 122 c_obj.repo_group_create_choices = [
121 123 ('hg.repogroup.create.false', _('Disabled')),
122 124 ('hg.repogroup.create.true', _('Enabled'))]
123 125
124 126 c_obj.fork_choices = [
125 ('hg.fork.none', _('Disabled')),
126 ('hg.fork.repository', _('Enabled'))]
127 (self.FORKING_DISABLED, _('Disabled')),
128 (self.FORKING_ENABLED, _('Enabled'))]
127 129
128 130 c_obj.inherit_default_permission_choices = [
129 131 ('hg.inherit_default_perms.false', _('Disabled')),
130 132 ('hg.inherit_default_perms.true', _('Enabled'))]
131 133
132 134 def get_default_perms(self, object_perms, suffix):
133 135 defaults = {}
134 136 for perm in object_perms:
135 137 # perms
136 138 if perm.permission.permission_name.startswith('repository.'):
137 139 defaults['default_repo_perm' + suffix] = perm.permission.permission_name
138 140
139 141 if perm.permission.permission_name.startswith('group.'):
140 142 defaults['default_group_perm' + suffix] = perm.permission.permission_name
141 143
142 144 if perm.permission.permission_name.startswith('usergroup.'):
143 145 defaults['default_user_group_perm' + suffix] = perm.permission.permission_name
144 146
145 147 # branch
146 148 if perm.permission.permission_name.startswith('branch.'):
147 149 defaults['default_branch_perm' + suffix] = perm.permission.permission_name
148 150
149 151 # creation of objects
150 152 if perm.permission.permission_name.startswith('hg.create.write_on_repogroup'):
151 153 defaults['default_repo_create_on_write' + suffix] = perm.permission.permission_name
152 154
153 155 elif perm.permission.permission_name.startswith('hg.create.'):
154 156 defaults['default_repo_create' + suffix] = perm.permission.permission_name
155 157
156 158 if perm.permission.permission_name.startswith('hg.fork.'):
157 159 defaults['default_fork_create' + suffix] = perm.permission.permission_name
158 160
159 161 if perm.permission.permission_name.startswith('hg.inherit_default_perms.'):
160 162 defaults['default_inherit_default_permissions' + suffix] = perm.permission.permission_name
161 163
162 164 if perm.permission.permission_name.startswith('hg.repogroup.'):
163 165 defaults['default_repo_group_create' + suffix] = perm.permission.permission_name
164 166
165 167 if perm.permission.permission_name.startswith('hg.usergroup.'):
166 168 defaults['default_user_group_create' + suffix] = perm.permission.permission_name
167 169
168 170 # registration and external account activation
169 171 if perm.permission.permission_name.startswith('hg.register.'):
170 172 defaults['default_register' + suffix] = perm.permission.permission_name
171 173
172 174 if perm.permission.permission_name.startswith('hg.password_reset.'):
173 175 defaults['default_password_reset' + suffix] = perm.permission.permission_name
174 176
175 177 if perm.permission.permission_name.startswith('hg.extern_activate.'):
176 178 defaults['default_extern_activate' + suffix] = perm.permission.permission_name
177 179
178 180 return defaults
179 181
180 182 def _make_new_user_perm(self, user, perm_name):
181 183 log.debug('Creating new user permission:%s', perm_name)
182 184 new = UserToPerm()
183 185 new.user = user
184 186 new.permission = Permission.get_by_key(perm_name)
185 187 return new
186 188
187 189 def _make_new_user_group_perm(self, user_group, perm_name):
188 190 log.debug('Creating new user group permission:%s', perm_name)
189 191 new = UserGroupToPerm()
190 192 new.users_group = user_group
191 193 new.permission = Permission.get_by_key(perm_name)
192 194 return new
193 195
194 196 def _keep_perm(self, perm_name, keep_fields):
195 197 def get_pat(field_name):
196 198 return {
197 199 # global perms
198 200 'default_repo_create': 'hg.create.',
199 201 # special case for create repos on write access to group
200 202 'default_repo_create_on_write': 'hg.create.write_on_repogroup.',
201 203 'default_repo_group_create': 'hg.repogroup.create.',
202 204 'default_user_group_create': 'hg.usergroup.create.',
203 205 'default_fork_create': 'hg.fork.',
204 206 'default_inherit_default_permissions': 'hg.inherit_default_perms.',
205 207
206 208 # application perms
207 209 'default_register': 'hg.register.',
208 210 'default_password_reset': 'hg.password_reset.',
209 211 'default_extern_activate': 'hg.extern_activate.',
210 212
211 213 # object permissions below
212 214 'default_repo_perm': 'repository.',
213 215 'default_group_perm': 'group.',
214 216 'default_user_group_perm': 'usergroup.',
215 217 # branch
216 218 'default_branch_perm': 'branch.',
217 219
218 220 }[field_name]
219 221 for field in keep_fields:
220 222 pat = get_pat(field)
221 223 if perm_name.startswith(pat):
222 224 return True
223 225 return False
224 226
225 227 def _clear_object_perm(self, object_perms, preserve=None):
226 228 preserve = preserve or []
227 229 _deleted = []
228 230 for perm in object_perms:
229 231 perm_name = perm.permission.permission_name
230 232 if not self._keep_perm(perm_name, keep_fields=preserve):
231 233 _deleted.append(perm_name)
232 234 self.sa.delete(perm)
233 235 return _deleted
234 236
235 237 def _clear_user_perms(self, user_id, preserve=None):
236 238 perms = self.sa.query(UserToPerm)\
237 239 .filter(UserToPerm.user_id == user_id)\
238 240 .all()
239 241 return self._clear_object_perm(perms, preserve=preserve)
240 242
241 243 def _clear_user_group_perms(self, user_group_id, preserve=None):
242 244 perms = self.sa.query(UserGroupToPerm)\
243 245 .filter(UserGroupToPerm.users_group_id == user_group_id)\
244 246 .all()
245 247 return self._clear_object_perm(perms, preserve=preserve)
246 248
247 249 def _set_new_object_perms(self, obj_type, object, form_result, preserve=None):
248 250 # clear current entries, to make this function idempotent
249 251 # it will fix even if we define more permissions or permissions
250 252 # are somehow missing
251 253 preserve = preserve or []
252 254 _global_perms = self.global_perms.copy()
253 255 if obj_type not in ['user', 'user_group']:
254 256 raise ValueError("obj_type must be on of 'user' or 'user_group'")
255 257 global_perms = len(_global_perms)
256 258 default_user_perms = len(Permission.DEFAULT_USER_PERMISSIONS)
257 259 if global_perms != default_user_perms:
258 260 raise Exception(
259 261 'Inconsistent permissions definition. Got {} vs {}'.format(
260 262 global_perms, default_user_perms))
261 263
262 264 if obj_type == 'user':
263 265 self._clear_user_perms(object.user_id, preserve)
264 266 if obj_type == 'user_group':
265 267 self._clear_user_group_perms(object.users_group_id, preserve)
266 268
267 269 # now kill the keys that we want to preserve from the form.
268 270 for key in preserve:
269 271 del _global_perms[key]
270 272
271 273 for k in _global_perms.copy():
272 274 _global_perms[k] = form_result[k]
273 275
274 276 # at that stage we validate all are passed inside form_result
275 277 for _perm_key, perm_value in _global_perms.items():
276 278 if perm_value is None:
277 279 raise ValueError('Missing permission for %s' % (_perm_key,))
278 280
279 281 if obj_type == 'user':
280 282 p = self._make_new_user_perm(object, perm_value)
281 283 self.sa.add(p)
282 284 if obj_type == 'user_group':
283 285 p = self._make_new_user_group_perm(object, perm_value)
284 286 self.sa.add(p)
285 287
286 288 def _set_new_user_perms(self, user, form_result, preserve=None):
287 289 return self._set_new_object_perms(
288 290 'user', user, form_result, preserve)
289 291
290 292 def _set_new_user_group_perms(self, user_group, form_result, preserve=None):
291 293 return self._set_new_object_perms(
292 294 'user_group', user_group, form_result, preserve)
293 295
294 296 def set_new_user_perms(self, user, form_result):
295 297 # calculate what to preserve from what is given in form_result
296 298 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
297 299 return self._set_new_user_perms(user, form_result, preserve)
298 300
299 301 def set_new_user_group_perms(self, user_group, form_result):
300 302 # calculate what to preserve from what is given in form_result
301 303 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
302 304 return self._set_new_user_group_perms(user_group, form_result, preserve)
303 305
304 306 def create_permissions(self):
305 307 """
306 308 Create permissions for whole system
307 309 """
308 310 for p in Permission.PERMS:
309 311 if not Permission.get_by_key(p[0]):
310 312 new_perm = Permission()
311 313 new_perm.permission_name = p[0]
312 314 new_perm.permission_longname = p[0] # translation err with p[1]
313 315 self.sa.add(new_perm)
314 316
315 317 def _create_default_object_permission(self, obj_type, obj, obj_perms,
316 318 force=False):
317 319 if obj_type not in ['user', 'user_group']:
318 320 raise ValueError("obj_type must be on of 'user' or 'user_group'")
319 321
320 322 def _get_group(perm_name):
321 323 return '.'.join(perm_name.split('.')[:1])
322 324
323 325 defined_perms_groups = map(
324 326 _get_group, (x.permission.permission_name for x in obj_perms))
325 327 log.debug('GOT ALREADY DEFINED:%s', obj_perms)
326 328
327 329 if force:
328 330 self._clear_object_perm(obj_perms)
329 331 self.sa.commit()
330 332 defined_perms_groups = []
331 333 # for every default permission that needs to be created, we check if
332 334 # it's group is already defined, if it's not we create default perm
333 335 for perm_name in Permission.DEFAULT_USER_PERMISSIONS:
334 336 gr = _get_group(perm_name)
335 337 if gr not in defined_perms_groups:
336 338 log.debug('GR:%s not found, creating permission %s',
337 339 gr, perm_name)
338 340 if obj_type == 'user':
339 341 new_perm = self._make_new_user_perm(obj, perm_name)
340 342 self.sa.add(new_perm)
341 343 if obj_type == 'user_group':
342 344 new_perm = self._make_new_user_group_perm(obj, perm_name)
343 345 self.sa.add(new_perm)
344 346
345 347 def create_default_user_permissions(self, user, force=False):
346 348 """
347 349 Creates only missing default permissions for user, if force is set it
348 350 resets the default permissions for that user
349 351
350 352 :param user:
351 353 :param force:
352 354 """
353 355 user = self._get_user(user)
354 356 obj_perms = UserToPerm.query().filter(UserToPerm.user == user).all()
355 357 return self._create_default_object_permission(
356 358 'user', user, obj_perms, force)
357 359
358 360 def create_default_user_group_permissions(self, user_group, force=False):
359 361 """
360 362 Creates only missing default permissions for user group, if force is
361 363 set it resets the default permissions for that user group
362 364
363 365 :param user_group:
364 366 :param force:
365 367 """
366 368 user_group = self._get_user_group(user_group)
367 369 obj_perms = UserToPerm.query().filter(UserGroupToPerm.users_group == user_group).all()
368 370 return self._create_default_object_permission(
369 371 'user_group', user_group, obj_perms, force)
370 372
371 373 def update_application_permissions(self, form_result):
372 374 if 'perm_user_id' in form_result:
373 375 perm_user = User.get(safe_int(form_result['perm_user_id']))
374 376 else:
375 377 # used mostly to do lookup for default user
376 378 perm_user = User.get_by_username(form_result['perm_user_name'])
377 379
378 380 try:
379 381 # stage 1 set anonymous access
380 382 if perm_user.username == User.DEFAULT_USER:
381 383 perm_user.active = str2bool(form_result['anonymous'])
382 384 self.sa.add(perm_user)
383 385
384 386 # stage 2 reset defaults and set them from form data
385 387 self._set_new_user_perms(perm_user, form_result, preserve=[
386 388 'default_repo_perm',
387 389 'default_group_perm',
388 390 'default_user_group_perm',
389 391 'default_branch_perm',
390 392
391 393 'default_repo_group_create',
392 394 'default_user_group_create',
393 395 'default_repo_create_on_write',
394 396 'default_repo_create',
395 397 'default_fork_create',
396 398 'default_inherit_default_permissions',])
397 399
398 400 self.sa.commit()
399 401 except (DatabaseError,):
400 402 log.error(traceback.format_exc())
401 403 self.sa.rollback()
402 404 raise
403 405
404 406 def update_user_permissions(self, form_result):
405 407 if 'perm_user_id' in form_result:
406 408 perm_user = User.get(safe_int(form_result['perm_user_id']))
407 409 else:
408 410 # used mostly to do lookup for default user
409 411 perm_user = User.get_by_username(form_result['perm_user_name'])
410 412 try:
411 413 # stage 2 reset defaults and set them from form data
412 414 self._set_new_user_perms(perm_user, form_result, preserve=[
413 415 'default_repo_perm',
414 416 'default_group_perm',
415 417 'default_user_group_perm',
416 418 'default_branch_perm',
417 419
418 420 'default_register',
419 421 'default_password_reset',
420 422 'default_extern_activate'])
421 423 self.sa.commit()
422 424 except (DatabaseError,):
423 425 log.error(traceback.format_exc())
424 426 self.sa.rollback()
425 427 raise
426 428
427 429 def update_user_group_permissions(self, form_result):
428 430 if 'perm_user_group_id' in form_result:
429 431 perm_user_group = UserGroup.get(safe_int(form_result['perm_user_group_id']))
430 432 else:
431 433 # used mostly to do lookup for default user
432 434 perm_user_group = UserGroup.get_by_group_name(form_result['perm_user_group_name'])
433 435 try:
434 436 # stage 2 reset defaults and set them from form data
435 437 self._set_new_user_group_perms(perm_user_group, form_result, preserve=[
436 438 'default_repo_perm',
437 439 'default_group_perm',
438 440 'default_user_group_perm',
439 441 'default_branch_perm',
440 442
441 443 'default_register',
442 444 'default_password_reset',
443 445 'default_extern_activate'])
444 446 self.sa.commit()
445 447 except (DatabaseError,):
446 448 log.error(traceback.format_exc())
447 449 self.sa.rollback()
448 450 raise
449 451
450 452 def update_object_permissions(self, form_result):
451 453 if 'perm_user_id' in form_result:
452 454 perm_user = User.get(safe_int(form_result['perm_user_id']))
453 455 else:
454 456 # used mostly to do lookup for default user
455 457 perm_user = User.get_by_username(form_result['perm_user_name'])
456 458 try:
457 459
458 460 # stage 2 reset defaults and set them from form data
459 461 self._set_new_user_perms(perm_user, form_result, preserve=[
460 462 'default_repo_group_create',
461 463 'default_user_group_create',
462 464 'default_repo_create_on_write',
463 465 'default_repo_create',
464 466 'default_fork_create',
465 467 'default_inherit_default_permissions',
466 468 'default_branch_perm',
467 469
468 470 'default_register',
469 471 'default_password_reset',
470 472 'default_extern_activate'])
471 473
472 474 # overwrite default repo permissions
473 475 if form_result['overwrite_default_repo']:
474 476 _def_name = form_result['default_repo_perm'].split('repository.')[-1]
475 477 _def = Permission.get_by_key('repository.' + _def_name)
476 478 for r2p in self.sa.query(UserRepoToPerm)\
477 479 .filter(UserRepoToPerm.user == perm_user)\
478 480 .all():
479 481 # don't reset PRIVATE repositories
480 482 if not r2p.repository.private:
481 483 r2p.permission = _def
482 484 self.sa.add(r2p)
483 485
484 486 # overwrite default repo group permissions
485 487 if form_result['overwrite_default_group']:
486 488 _def_name = form_result['default_group_perm'].split('group.')[-1]
487 489 _def = Permission.get_by_key('group.' + _def_name)
488 490 for g2p in self.sa.query(UserRepoGroupToPerm)\
489 491 .filter(UserRepoGroupToPerm.user == perm_user)\
490 492 .all():
491 493 g2p.permission = _def
492 494 self.sa.add(g2p)
493 495
494 496 # overwrite default user group permissions
495 497 if form_result['overwrite_default_user_group']:
496 498 _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1]
497 499 # user groups
498 500 _def = Permission.get_by_key('usergroup.' + _def_name)
499 501 for g2p in self.sa.query(UserUserGroupToPerm)\
500 502 .filter(UserUserGroupToPerm.user == perm_user)\
501 503 .all():
502 504 g2p.permission = _def
503 505 self.sa.add(g2p)
504 506
505 507 # COMMIT
506 508 self.sa.commit()
507 509 except (DatabaseError,):
508 510 log.exception('Failed to set default object permissions')
509 511 self.sa.rollback()
510 512 raise
511 513
512 514 def update_branch_permissions(self, form_result):
513 515 if 'perm_user_id' in form_result:
514 516 perm_user = User.get(safe_int(form_result['perm_user_id']))
515 517 else:
516 518 # used mostly to do lookup for default user
517 519 perm_user = User.get_by_username(form_result['perm_user_name'])
518 520 try:
519 521
520 522 # stage 2 reset defaults and set them from form data
521 523 self._set_new_user_perms(perm_user, form_result, preserve=[
522 524 'default_repo_perm',
523 525 'default_group_perm',
524 526 'default_user_group_perm',
525 527
526 528 'default_repo_group_create',
527 529 'default_user_group_create',
528 530 'default_repo_create_on_write',
529 531 'default_repo_create',
530 532 'default_fork_create',
531 533 'default_inherit_default_permissions',
532 534
533 535 'default_register',
534 536 'default_password_reset',
535 537 'default_extern_activate'])
536 538
537 539 # overwrite default branch permissions
538 540 if form_result['overwrite_default_branch']:
539 541 _def_name = \
540 542 form_result['default_branch_perm'].split('branch.')[-1]
541 543
542 544 _def = Permission.get_by_key('branch.' + _def_name)
543 545
544 546 user_perms = UserToRepoBranchPermission.query()\
545 547 .join(UserToRepoBranchPermission.user_repo_to_perm)\
546 548 .filter(UserRepoToPerm.user == perm_user).all()
547 549
548 550 for g2p in user_perms:
549 551 g2p.permission = _def
550 552 self.sa.add(g2p)
551 553
552 554 # COMMIT
553 555 self.sa.commit()
554 556 except (DatabaseError,):
555 557 log.exception('Failed to set default branch permissions')
556 558 self.sa.rollback()
557 559 raise
558 560
559 561 def get_users_with_repo_write(self, db_repo):
560 562 write_plus = ['repository.write', 'repository.admin']
561 563 default_user_id = User.get_default_user_id()
562 564 user_write_permissions = collections.OrderedDict()
563 565
564 566 # write or higher and DEFAULT user for inheritance
565 567 for perm in db_repo.permissions():
566 568 if perm.permission in write_plus or perm.user_id == default_user_id:
567 569 user_write_permissions[perm.user_id] = perm
568 570 return user_write_permissions
569 571
570 572 def get_user_groups_with_repo_write(self, db_repo):
571 573 write_plus = ['repository.write', 'repository.admin']
572 574 user_group_write_permissions = collections.OrderedDict()
573 575
574 576 # write or higher and DEFAULT user for inheritance
575 577 for p in db_repo.permission_user_groups():
576 578 if p.permission in write_plus:
577 579 user_group_write_permissions[p.users_group_id] = p
578 580 return user_group_write_permissions
579 581
580 582 def trigger_permission_flush(self, affected_user_ids=None):
581 583 affected_user_ids = affected_user_ids or User.get_all_user_ids()
582 584 events.trigger(events.UserPermissionsChange(affected_user_ids))
583 585
584 586 def flush_user_permission_caches(self, changes, affected_user_ids=None):
585 587 affected_user_ids = affected_user_ids or []
586 588
587 589 for change in changes['added'] + changes['updated'] + changes['deleted']:
588 590 if change['type'] == 'user':
589 591 affected_user_ids.append(change['id'])
590 592 if change['type'] == 'user_group':
591 593 user_group = UserGroup.get(safe_int(change['id']))
592 594 if user_group:
593 595 group_members_ids = [x.user_id for x in user_group.members]
594 596 affected_user_ids.extend(group_members_ids)
595 597
596 598 self.trigger_permission_flush(affected_user_ids)
597 599
598 600 return affected_user_ids
@@ -1,2247 +1,2249 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import os
30 30
31 31 import datetime
32 32 import urllib
33 33 import collections
34 34
35 35 from pyramid import compat
36 36 from pyramid.threadlocal import get_current_request
37 37
38 38 from rhodecode.lib.vcs.nodes import FileNode
39 39 from rhodecode.translation import lazy_ugettext
40 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
41 41 from rhodecode.lib import audit_logger
42 42 from rhodecode.lib.compat import OrderedDict
43 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 44 from rhodecode.lib.markup_renderer import (
45 45 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 46 from rhodecode.lib.utils2 import (
47 47 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 48 get_current_rhodecode_user)
49 49 from rhodecode.lib.vcs.backends.base import (
50 50 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 51 TargetRefMissing, SourceRefMissing)
52 52 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 CommitDoesNotExistError, EmptyRepositoryError)
55 55 from rhodecode.model import BaseModel
56 56 from rhodecode.model.changeset_status import ChangesetStatusModel
57 57 from rhodecode.model.comment import CommentsModel
58 58 from rhodecode.model.db import (
59 59 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 60 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 61 from rhodecode.model.meta import Session
62 62 from rhodecode.model.notification import NotificationModel, \
63 63 EmailNotificationModel
64 64 from rhodecode.model.scm import ScmModel
65 65 from rhodecode.model.settings import VcsSettingsModel
66 66
67 67
68 68 log = logging.getLogger(__name__)
69 69
70 70
# Plain value object holding the response data produced when updating the
# commits of a pull request.
class UpdateResponse(object):
    """Describes the outcome of a pull-request commit update."""

    def __init__(self, executed, reason, new, old, common_ancestor_id,
                 commit_changes, source_changed, target_changed):
        # outcome of the update attempt
        self.executed = executed
        self.reason = reason
        self.source_changed = source_changed
        self.target_changed = target_changed
        # commit bookkeeping; note commit_changes is exposed as ``.changes``
        self.new = new
        self.old = old
        self.changes = commit_changes
        self.common_ancestor_id = common_ancestor_id
87 87
def get_diff_info(
        source_repo, source_ref, target_repo, target_ref, get_authors=False,
        get_commit_authors=True):
    """
    Calculates detailed diff information for usage in preview of creation of a pull-request.
    This is also used for default reviewers logic

    :param source_repo: source repository db object
    :param source_ref: commit id/ref on the source side
    :param target_repo: target repository db object
    :param target_ref: commit id/ref on the target side
    :param get_authors: when True, annotate changed files to count the
        original authors of the modified lines (expensive)
    :param get_commit_authors: when True, collect the authors of the incoming
        commits via a repo compare
    :returns: dict with keys 'commits', 'files', 'stats', 'ancestor',
        'original_authors' and 'commit_authors'
    """

    source_scm = source_repo.scm_instance()
    target_scm = target_repo.scm_instance()

    # a common ancestor is mandatory; without it no meaningful diff exists
    ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
    if not ancestor_id:
        raise ValueError(
            'cannot calculate diff info without a common ancestor. '
            'Make sure both repositories are related, and have a common forking commit.')

    # case here is that want a simple diff without incoming commits,
    # previewing what will be merged based only on commits in the source.
    log.debug('Using ancestor %s as source_ref instead of %s',
              ancestor_id, source_ref)

    # source of changes now is the common ancestor
    source_commit = source_scm.get_commit(commit_id=ancestor_id)
    # target commit becomes the source ref as it is the last commit
    # for diff generation this logic gives proper diff
    target_commit = source_scm.get_commit(commit_id=source_ref)

    vcs_diff = \
        source_scm.get_diff(commit1=source_commit, commit2=target_commit,
                            ignore_whitespace=False, context=3)

    diff_processor = diffs.DiffProcessor(
        vcs_diff, format='newdiff', diff_limit=None,
        file_limit=None, show_full_diff=True)

    _parsed = diff_processor.prepare()

    # collect per-file stats and the old line numbers touched by del/mod
    # actions; those line numbers drive the annotation step below
    all_files = []
    all_files_changes = []
    changed_lines = {}
    stats = [0, 0]  # [added, deleted] totals across all files
    for f in _parsed:
        all_files.append(f['filename'])
        all_files_changes.append({
            'filename': f['filename'],
            'stats': f['stats']
        })
        stats[0] += f['stats']['added']
        stats[1] += f['stats']['deleted']

        changed_lines[f['filename']] = []
        if len(f['chunks']) < 2:
            continue
        # first line is "context" information
        for chunks in f['chunks'][1:]:
            for chunk in chunks['lines']:
                if chunk['action'] not in ('del', 'mod'):
                    continue
                changed_lines[f['filename']].append(chunk['old_lineno'])

    commit_authors = []
    user_counts = {}
    email_counts = {}
    author_counts = {}
    # memoizes commits fetched during annotation, keyed by commit id
    _commit_cache = {}

    commits = []
    if get_commit_authors:
        log.debug('Obtaining commit authors from set of commits')
        _compare_data = target_scm.compare(
            target_ref, source_ref, source_scm, merge=True,
            pre_load=["author", "date", "message"]
        )

        for commit in _compare_data:
            # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
            # at this function which is later called via JSON serialization
            serialized_commit = dict(
                author=commit.author,
                date=commit.date,
                message=commit.message,
                commit_id=commit.raw_id,
                raw_id=commit.raw_id
            )
            commits.append(serialized_commit)
            user = User.get_from_cs_author(serialized_commit['author'])
            if user and user not in commit_authors:
                commit_authors.append(user)

    # lines
    if get_authors:
        log.debug('Calculating authors of changed files')
        target_commit = source_repo.get_commit(ancestor_id)

        for fname, lines in changed_lines.items():

            try:
                node = target_commit.get_node(fname, pre_load=["is_binary"])
            except Exception:
                # best-effort: a missing/renamed node must not break the preview
                log.exception("Failed to load node with path %s", fname)
                continue

            if not isinstance(node, FileNode):
                continue

            # NOTE(marcink): for binary node we don't do annotation, just use last author
            if node.is_binary:
                author = node.last_commit.author
                email = node.last_commit.author_email

                user = User.get_from_cs_author(author)
                if user:
                    user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                author_counts[author] = author_counts.get(author, 0) + 1
                email_counts[email] = email_counts.get(email, 0) + 1

                continue

            # count authorship only for the lines this diff actually touched
            for annotation in node.annotate:
                line_no, commit_id, get_commit_func, line_text = annotation
                if line_no in lines:
                    if commit_id not in _commit_cache:
                        _commit_cache[commit_id] = get_commit_func()
                    commit = _commit_cache[commit_id]
                    author = commit.author
                    email = commit.author_email
                    user = User.get_from_cs_author(author)
                    if user:
                        user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
                    author_counts[author] = author_counts.get(author, 0) + 1
                    email_counts[email] = email_counts.get(email, 0) + 1

    log.debug('Default reviewers processing finished')

    return {
        'commits': commits,
        'files': all_files_changes,
        'stats': stats,
        'ancestor': ancestor_id,
        # original authors of modified files
        'original_authors': {
            'users': user_counts,
            'authors': author_counts,
            'emails': email_counts,
        },
        'commit_authors': commit_authors
    }
236 236
237 237
class PullRequestModel(BaseModel):
    """
    Business logic for pull requests: creation, querying, permission checks,
    updates and merging.
    """

    # BaseModel hook: the primary db class this model operates on
    cls = PullRequest

    # default number of context lines used when generating PR diffs
    DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

    # user-facing messages for each possible outcome of a PR update attempt
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
    # all reference types a pull request may point at
    REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
    # reference types that support the "update commits" operation
    UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
263 263
264 264 def __get_pull_request(self, pull_request):
265 265 return self._get_instance((
266 266 PullRequest, PullRequestVersion), pull_request)
267 267
268 268 def _check_perms(self, perms, pull_request, user, api=False):
269 269 if not api:
270 270 return h.HasRepoPermissionAny(*perms)(
271 271 user=user, repo_name=pull_request.target_repo.repo_name)
272 272 else:
273 273 return h.HasRepoPermissionAnyApi(*perms)(
274 274 user=user, repo_name=pull_request.target_repo.repo_name)
275 275
276 276 def check_user_read(self, pull_request, user, api=False):
277 277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
278 278 return self._check_perms(_perms, pull_request, user, api)
279 279
280 280 def check_user_merge(self, pull_request, user, api=False):
281 281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
282 282 return self._check_perms(_perms, pull_request, user, api)
283 283
284 284 def check_user_update(self, pull_request, user, api=False):
285 285 owner = user.user_id == pull_request.user_id
286 286 return self.check_user_merge(pull_request, user, api) or owner
287 287
288 288 def check_user_delete(self, pull_request, user):
289 289 owner = user.user_id == pull_request.user_id
290 290 _perms = ('repository.admin',)
291 291 return self._check_perms(_perms, pull_request, user) or owner
292 292
293 293 def is_user_reviewer(self, pull_request, user):
294 294 return user.user_id in [
295 295 x.user_id for x in
296 296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
297 297 if x.user
298 298 ]
299 299
300 300 def check_user_change_status(self, pull_request, user, api=False):
301 301 return self.check_user_update(pull_request, user, api) \
302 302 or self.is_user_reviewer(pull_request, user)
303 303
304 304 def check_user_comment(self, pull_request, user):
305 305 owner = user.user_id == pull_request.user_id
306 306 return self.check_user_read(pull_request, user) or owner
307 307
308 308 def get(self, pull_request):
309 309 return self.__get_pull_request(pull_request)
310 310
    def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
                               statuses=None, opened_by=None, order_by=None,
                               order_dir='desc', only_created=False):
        """
        Build (but do not execute) the base query for listing pull requests.

        :param repo_name: repository name; falsy skips the repository filter
        :param search_q: free-text filter matched (case-insensitive) against
            PR id, author username, title and description
        :param source: when True, ``repo_name`` refers to the source repo
            instead of the target
        :param statuses: list of pull request statuses to include
        :param opened_by: list of author user ids
        :param order_by: one of the keys of the ``order_map`` below
        :param order_dir: 'asc' or 'desc'
        :param only_created: restrict to PRs still in the "created" state
        :returns: un-executed SQLAlchemy query object
        """
        repo = None
        if repo_name:
            repo = self._get_repo(repo_name)

        q = PullRequest.query()

        if search_q:
            like_expression = u'%{}%'.format(safe_unicode(search_q))
            q = q.join(User)
            q = q.filter(or_(
                # numeric id is matched as text so partial ids work too
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))

        # source or target
        if repo and source:
            q = q.filter(PullRequest.source_repo == repo)
        elif repo:
            q = q.filter(PullRequest.target_repo == repo)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        # opened by filter
        if opened_by:
            q = q.filter(PullRequest.user_id.in_(opened_by))

        # only get those that are in "created" state
        if only_created:
            q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)

        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'id': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
362 362
363 363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
364 364 opened_by=None):
365 365 """
366 366 Count the number of pull requests for a specific repository.
367 367
368 368 :param repo_name: target or source repo
369 369 :param search_q: filter by text
370 370 :param source: boolean flag to specify if repo_name refers to source
371 371 :param statuses: list of pull request statuses
372 372 :param opened_by: author user of the pull request
373 373 :returns: int number of pull requests
374 374 """
375 375 q = self._prepare_get_all_query(
376 376 repo_name, search_q=search_q, source=source, statuses=statuses,
377 377 opened_by=opened_by)
378 378
379 379 return q.count()
380 380
381 381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
382 382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
383 383 """
384 384 Get all pull requests for a specific repository.
385 385
386 386 :param repo_name: target or source repo
387 387 :param search_q: filter by text
388 388 :param source: boolean flag to specify if repo_name refers to source
389 389 :param statuses: list of pull request statuses
390 390 :param opened_by: author user of the pull request
391 391 :param offset: pagination offset
392 392 :param length: length of returned list
393 393 :param order_by: order of the returned list
394 394 :param order_dir: 'asc' or 'desc' ordering direction
395 395 :returns: list of pull requests
396 396 """
397 397 q = self._prepare_get_all_query(
398 398 repo_name, search_q=search_q, source=source, statuses=statuses,
399 399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
400 400
401 401 if length:
402 402 pull_requests = q.limit(length).offset(offset).all()
403 403 else:
404 404 pull_requests = q.all()
405 405
406 406 return pull_requests
407 407
408 408 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
409 409 opened_by=None):
410 410 """
411 411 Count the number of pull requests for a specific repository that are
412 412 awaiting review.
413 413
414 414 :param repo_name: target or source repo
415 415 :param search_q: filter by text
416 416 :param source: boolean flag to specify if repo_name refers to source
417 417 :param statuses: list of pull request statuses
418 418 :param opened_by: author user of the pull request
419 419 :returns: int number of pull requests
420 420 """
421 421 pull_requests = self.get_awaiting_review(
422 422 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
423 423
424 424 return len(pull_requests)
425 425
426 426 def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
427 427 opened_by=None, offset=0, length=None,
428 428 order_by=None, order_dir='desc'):
429 429 """
430 430 Get all pull requests for a specific repository that are awaiting
431 431 review.
432 432
433 433 :param repo_name: target or source repo
434 434 :param search_q: filter by text
435 435 :param source: boolean flag to specify if repo_name refers to source
436 436 :param statuses: list of pull request statuses
437 437 :param opened_by: author user of the pull request
438 438 :param offset: pagination offset
439 439 :param length: length of returned list
440 440 :param order_by: order of the returned list
441 441 :param order_dir: 'asc' or 'desc' ordering direction
442 442 :returns: list of pull requests
443 443 """
444 444 pull_requests = self.get_all(
445 445 repo_name, search_q=search_q, source=source, statuses=statuses,
446 446 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
447 447
448 448 _filtered_pull_requests = []
449 449 for pr in pull_requests:
450 450 status = pr.calculated_review_status()
451 451 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
452 452 ChangesetStatus.STATUS_UNDER_REVIEW]:
453 453 _filtered_pull_requests.append(pr)
454 454 if length:
455 455 return _filtered_pull_requests[offset:offset+length]
456 456 else:
457 457 return _filtered_pull_requests
458 458
459 459 def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
460 460 opened_by=None, user_id=None):
461 461 """
462 462 Count the number of pull requests for a specific repository that are
463 463 awaiting review from a specific user.
464 464
465 465 :param repo_name: target or source repo
466 466 :param search_q: filter by text
467 467 :param source: boolean flag to specify if repo_name refers to source
468 468 :param statuses: list of pull request statuses
469 469 :param opened_by: author user of the pull request
470 470 :param user_id: reviewer user of the pull request
471 471 :returns: int number of pull requests
472 472 """
473 473 pull_requests = self.get_awaiting_my_review(
474 474 repo_name, search_q=search_q, source=source, statuses=statuses,
475 475 opened_by=opened_by, user_id=user_id)
476 476
477 477 return len(pull_requests)
478 478
479 479 def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
480 480 opened_by=None, user_id=None, offset=0,
481 481 length=None, order_by=None, order_dir='desc'):
482 482 """
483 483 Get all pull requests for a specific repository that are awaiting
484 484 review from a specific user.
485 485
486 486 :param repo_name: target or source repo
487 487 :param search_q: filter by text
488 488 :param source: boolean flag to specify if repo_name refers to source
489 489 :param statuses: list of pull request statuses
490 490 :param opened_by: author user of the pull request
491 491 :param user_id: reviewer user of the pull request
492 492 :param offset: pagination offset
493 493 :param length: length of returned list
494 494 :param order_by: order of the returned list
495 495 :param order_dir: 'asc' or 'desc' ordering direction
496 496 :returns: list of pull requests
497 497 """
498 498 pull_requests = self.get_all(
499 499 repo_name, search_q=search_q, source=source, statuses=statuses,
500 500 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
501 501
502 502 _my = PullRequestModel().get_not_reviewed(user_id)
503 503 my_participation = []
504 504 for pr in pull_requests:
505 505 if pr in _my:
506 506 my_participation.append(pr)
507 507 _filtered_pull_requests = my_participation
508 508 if length:
509 509 return _filtered_pull_requests[offset:offset+length]
510 510 else:
511 511 return _filtered_pull_requests
512 512
513 513 def get_not_reviewed(self, user_id):
514 514 return [
515 515 x.pull_request for x in PullRequestReviewers.query().filter(
516 516 PullRequestReviewers.user_id == user_id).all()
517 517 ]
518 518
    def _prepare_participating_query(self, user_id=None, statuses=None, query='',
                                     order_by=None, order_dir='desc'):
        """
        Build (but do not execute) the query for pull requests the user
        either authored or is assigned to as a reviewer.

        :param user_id: id of the participating user; falsy matches all PRs
        :param statuses: list of pull request statuses to include
        :param query: free-text filter (id, author username, title, description)
        :param order_by: one of the keys of the ``order_map`` below
        :param order_dir: 'asc' or 'desc'
        :returns: un-executed SQLAlchemy query object
        """
        q = PullRequest.query()
        if user_id:
            # match PRs authored by the user OR listed in the reviewers
            # table via a subquery; note the base query is rebuilt here
            reviewers_subquery = Session().query(
                PullRequestReviewers.pull_request_id).filter(
                PullRequestReviewers.user_id == user_id).subquery()
            user_filter = or_(
                PullRequest.user_id == user_id,
                PullRequest.pull_request_id.in_(reviewers_subquery)
            )
            q = PullRequest.query().filter(user_filter)

        # closed,opened
        if statuses:
            q = q.filter(PullRequest.status.in_(statuses))

        if query:
            like_expression = u'%{}%'.format(safe_unicode(query))
            q = q.join(User)
            q = q.filter(or_(
                cast(PullRequest.pull_request_id, String).ilike(like_expression),
                User.username.ilike(like_expression),
                PullRequest.title.ilike(like_expression),
                PullRequest.description.ilike(like_expression),
            ))
        if order_by:
            order_map = {
                'name_raw': PullRequest.pull_request_id,
                'title': PullRequest.title,
                'updated_on_raw': PullRequest.updated_on,
                'target_repo': PullRequest.target_repo_id
            }
            if order_dir == 'asc':
                q = q.order_by(order_map[order_by].asc())
            else:
                q = q.order_by(order_map[order_by].desc())

        return q
558 558
559 559 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
560 560 q = self._prepare_participating_query(user_id, statuses=statuses, query=query)
561 561 return q.count()
562 562
563 563 def get_im_participating_in(
564 564 self, user_id=None, statuses=None, query='', offset=0,
565 565 length=None, order_by=None, order_dir='desc'):
566 566 """
567 567 Get all Pull requests that i'm participating in, or i have opened
568 568 """
569 569
570 570 q = self._prepare_participating_query(
571 571 user_id, statuses=statuses, query=query, order_by=order_by,
572 572 order_dir=order_dir)
573 573
574 574 if length:
575 575 pull_requests = q.limit(length).offset(offset).all()
576 576 else:
577 577 pull_requests = q.all()
578 578
579 579 return pull_requests
580 580
581 581 def get_versions(self, pull_request):
582 582 """
583 583 returns version of pull request sorted by ID descending
584 584 """
585 585 return PullRequestVersion.query()\
586 586 .filter(PullRequestVersion.pull_request == pull_request)\
587 587 .order_by(PullRequestVersion.pull_request_version_id.asc())\
588 588 .all()
589 589
    def get_pr_version(self, pull_request_id, version=None):
        """
        Resolve a pull request together with an optional version of it.

        :param pull_request_id: id of the pull request
        :param version: None for the live PR, the string 'latest', or a
            concrete PullRequestVersion id (404s when it does not exist)
        :returns: 4-tuple of (original PR object, PR-or-version object,
            display wrapper object, at_version marker)
        """
        at_version = None

        if version and version == 'latest':
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            # the parent live pull request this version belongs to
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version
612 612
613 613 def pr_commits_versions(self, versions):
614 614 """
615 615 Maps the pull-request commits into all known PR versions. This way we can obtain
616 616 each pr version the commit was introduced in.
617 617 """
618 618 commit_versions = collections.defaultdict(list)
619 619 num_versions = [x.pull_request_version_id for x in versions]
620 620 for ver in versions:
621 621 for commit_id in ver.revisions:
622 622 ver_idx = ChangesetComment.get_index_from_version(
623 623 ver.pull_request_version_id, num_versions=num_versions)
624 624 commit_versions[commit_id].append(ver_idx)
625 625 return commit_versions
626 626
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, observers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request, attach its reviewers/observers, set the
        initial "under review" status and run an initial merge simulation.

        :param created_by: user id/object of the PR author
        :param source_repo: source repository (name/id/object)
        :param source_ref: full source reference string
        :param target_repo: target repository (name/id/object)
        :param target_ref: full target reference string
        :param revisions: list of commit ids included in the PR
        :param reviewers: iterable of (user_id, reasons, mandatory, role, rules)
        :param observers: same shape as reviewers; entries duplicating a
            reviewer are skipped
        :param title: pull request title
        :param description: optional description text
        :param common_ancestor_id: pre-computed common ancestor commit id
        :param description_renderer: renderer name for the description
        :param reviewer_data: rule data used to compute default reviewers
        :param translator: translation function; defaults to the current
            request's translator
        :param auth_user: acting user; defaults to the author's AuthUser
        :returns: the newly created PullRequest instance
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        # flush so the PR gets an id that reviewer rows can reference
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            #reviewer_ids.add(user.user_id)

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
768 768
769 769 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
770 770 pull_request = self.__get_pull_request(pull_request)
771 771 target_scm = pull_request.target_repo.scm_instance()
772 772 if action == 'create':
773 773 trigger_hook = hooks_utils.trigger_create_pull_request_hook
774 774 elif action == 'merge':
775 775 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
776 776 elif action == 'close':
777 777 trigger_hook = hooks_utils.trigger_close_pull_request_hook
778 778 elif action == 'review_status_change':
779 779 trigger_hook = hooks_utils.trigger_review_pull_request_hook
780 780 elif action == 'update':
781 781 trigger_hook = hooks_utils.trigger_update_pull_request_hook
782 782 elif action == 'comment':
783 783 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
784 784 elif action == 'comment_edit':
785 785 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
786 786 else:
787 787 return
788 788
789 789 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
790 790 pull_request, action, trigger_hook)
791 791 trigger_hook(
792 792 username=user.username,
793 793 repo_name=pull_request.target_repo.repo_name,
794 794 repo_type=target_scm.alias,
795 795 pull_request=pull_request,
796 796 data=data)
797 797
798 798 def _get_commit_ids(self, pull_request):
799 799 """
800 800 Return the commit ids of the merged pull request.
801 801
802 802 This method is not dealing correctly yet with the lack of autoupdates
803 803 nor with the implicit target updates.
804 804 For example: if a commit in the source repo is already in the target it
805 805 will be reported anyways.
806 806 """
807 807 merge_rev = pull_request.merge_rev
808 808 if merge_rev is None:
809 809 raise ValueError('This pull request was not merged yet')
810 810
811 811 commit_ids = list(pull_request.revisions)
812 812 if merge_rev not in commit_ids:
813 813 commit_ids.append(merge_rev)
814 814
815 815 return commit_ids
816 816
817 817 def merge_repo(self, pull_request, user, extras):
818 818 log.debug("Merging pull request %s", pull_request.pull_request_id)
819 819 extras['user_agent'] = 'internal-merge'
820 820 merge_state = self._merge_pull_request(pull_request, user, extras)
821 821 if merge_state.executed:
822 822 log.debug("Merge was successful, updating the pull request comments.")
823 823 self._comment_and_close_pr(pull_request, user, merge_state)
824 824
825 825 self._log_audit_action(
826 826 'repo.pull_request.merge',
827 827 {'merge_state': merge_state.__dict__},
828 828 user, pull_request)
829 829
830 830 else:
831 831 log.warn("Merge failed, not updating the pull request.")
832 832 return merge_state
833 833
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the actual VCS merge of a pull request.

        Builds the merge commit message from ``MERGE_MESSAGE_TMPL`` (unless a
        custom ``merge_msg`` template is given), refreshes the target
        reference, then runs the backend merge inside a prepared callback
        daemon context — presumably so RhodeCode hooks can be reached during
        the merge (TODO confirm).

        :param merge_msg: optional custom merge-commit message template
        :returns: merge state object returned by ``target_vcs.merge``
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # fill the merge-commit message template with PR metadata
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # make sure we merge against the current tip of the target ref
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
874 874
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        Record a successful merge on the pull request: store the merge
        revision, add a closing comment, invalidate target-repo caches and
        fire the 'merge' hook.

        :param merge_state: backend merge state; its merge_ref commit id
            becomes the PR's merge_rev
        :param close_msg: optional custom closing-comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing comment; closing_pr=True flags this comment as the one
        # that closes the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
896 896
897 897 def has_valid_update_type(self, pull_request):
898 898 source_ref_type = pull_request.source_ref_parts.type
899 899 return source_ref_type in self.REF_TYPES
900 900
901 901 def get_flow_commits(self, pull_request):
902 902
903 903 # source repo
904 904 source_ref_name = pull_request.source_ref_parts.name
905 905 source_ref_type = pull_request.source_ref_parts.type
906 906 source_ref_id = pull_request.source_ref_parts.commit_id
907 907 source_repo = pull_request.source_repo.scm_instance()
908 908
909 909 try:
910 910 if source_ref_type in self.REF_TYPES:
911 source_commit = source_repo.get_commit(source_ref_name)
911 source_commit = source_repo.get_commit(
912 source_ref_name, reference_obj=pull_request.source_ref_parts)
912 913 else:
913 914 source_commit = source_repo.get_commit(source_ref_id)
914 915 except CommitDoesNotExistError:
915 916 raise SourceRefMissing()
916 917
917 918 # target repo
918 919 target_ref_name = pull_request.target_ref_parts.name
919 920 target_ref_type = pull_request.target_ref_parts.type
920 921 target_ref_id = pull_request.target_ref_parts.commit_id
921 922 target_repo = pull_request.target_repo.scm_instance()
922 923
923 924 try:
924 925 if target_ref_type in self.REF_TYPES:
925 target_commit = target_repo.get_commit(target_ref_name)
926 target_commit = target_repo.get_commit(
927 target_ref_name, reference_obj=pull_request.target_ref_parts)
926 928 else:
927 929 target_commit = target_repo.get_commit(target_ref_id)
928 930 except CommitDoesNotExistError:
929 931 raise TargetRefMissing()
930 932
931 933 return source_commit, target_commit
932 934
933 935 def update_commits(self, pull_request, updating_user):
934 936 """
935 937 Get the updated list of commits for the pull request
936 938 and return the new pull request version and the list
937 939 of commits processed by this update action
938 940
939 941 updating_user is the user_object who triggered the update
940 942 """
941 943 pull_request = self.__get_pull_request(pull_request)
942 944 source_ref_type = pull_request.source_ref_parts.type
943 945 source_ref_name = pull_request.source_ref_parts.name
944 946 source_ref_id = pull_request.source_ref_parts.commit_id
945 947
946 948 target_ref_type = pull_request.target_ref_parts.type
947 949 target_ref_name = pull_request.target_ref_parts.name
948 950 target_ref_id = pull_request.target_ref_parts.commit_id
949 951
950 952 if not self.has_valid_update_type(pull_request):
951 953 log.debug("Skipping update of pull request %s due to ref type: %s",
952 954 pull_request, source_ref_type)
953 955 return UpdateResponse(
954 956 executed=False,
955 957 reason=UpdateFailureReason.WRONG_REF_TYPE,
956 958 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
957 959 source_changed=False, target_changed=False)
958 960
959 961 try:
960 962 source_commit, target_commit = self.get_flow_commits(pull_request)
961 963 except SourceRefMissing:
962 964 return UpdateResponse(
963 965 executed=False,
964 966 reason=UpdateFailureReason.MISSING_SOURCE_REF,
965 967 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
966 968 source_changed=False, target_changed=False)
967 969 except TargetRefMissing:
968 970 return UpdateResponse(
969 971 executed=False,
970 972 reason=UpdateFailureReason.MISSING_TARGET_REF,
971 973 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
972 974 source_changed=False, target_changed=False)
973 975
974 976 source_changed = source_ref_id != source_commit.raw_id
975 977 target_changed = target_ref_id != target_commit.raw_id
976 978
977 979 if not (source_changed or target_changed):
978 980 log.debug("Nothing changed in pull request %s", pull_request)
979 981 return UpdateResponse(
980 982 executed=False,
981 983 reason=UpdateFailureReason.NO_CHANGE,
982 984 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
983 985 source_changed=target_changed, target_changed=source_changed)
984 986
985 987 change_in_found = 'target repo' if target_changed else 'source repo'
986 988 log.debug('Updating pull request because of change in %s detected',
987 989 change_in_found)
988 990
989 991 # Finally there is a need for an update, in case of source change
990 992 # we create a new version, else just an update
991 993 if source_changed:
992 994 pull_request_version = self._create_version_from_snapshot(pull_request)
993 995 self._link_comments_to_version(pull_request_version)
994 996 else:
995 997 try:
996 998 ver = pull_request.versions[-1]
997 999 except IndexError:
998 1000 ver = None
999 1001
1000 1002 pull_request.pull_request_version_id = \
1001 1003 ver.pull_request_version_id if ver else None
1002 1004 pull_request_version = pull_request
1003 1005
1004 1006 source_repo = pull_request.source_repo.scm_instance()
1005 1007 target_repo = pull_request.target_repo.scm_instance()
1006 1008
1007 1009 # re-compute commit ids
1008 1010 old_commit_ids = pull_request.revisions
1009 1011 pre_load = ["author", "date", "message", "branch"]
1010 1012 commit_ranges = target_repo.compare(
1011 1013 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1012 1014 pre_load=pre_load)
1013 1015
1014 1016 target_ref = target_commit.raw_id
1015 1017 source_ref = source_commit.raw_id
1016 1018 ancestor_commit_id = target_repo.get_common_ancestor(
1017 1019 target_ref, source_ref, source_repo)
1018 1020
1019 1021 if not ancestor_commit_id:
1020 1022 raise ValueError(
1021 1023 'cannot calculate diff info without a common ancestor. '
1022 1024 'Make sure both repositories are related, and have a common forking commit.')
1023 1025
1024 1026 pull_request.common_ancestor_id = ancestor_commit_id
1025 1027
1026 1028 pull_request.source_ref = '%s:%s:%s' % (
1027 1029 source_ref_type, source_ref_name, source_commit.raw_id)
1028 1030 pull_request.target_ref = '%s:%s:%s' % (
1029 1031 target_ref_type, target_ref_name, ancestor_commit_id)
1030 1032
1031 1033 pull_request.revisions = [
1032 1034 commit.raw_id for commit in reversed(commit_ranges)]
1033 1035 pull_request.updated_on = datetime.datetime.now()
1034 1036 Session().add(pull_request)
1035 1037 new_commit_ids = pull_request.revisions
1036 1038
1037 1039 old_diff_data, new_diff_data = self._generate_update_diffs(
1038 1040 pull_request, pull_request_version)
1039 1041
1040 1042 # calculate commit and file changes
1041 1043 commit_changes = self._calculate_commit_id_changes(
1042 1044 old_commit_ids, new_commit_ids)
1043 1045 file_changes = self._calculate_file_changes(
1044 1046 old_diff_data, new_diff_data)
1045 1047
1046 1048 # set comments as outdated if DIFFS changed
1047 1049 CommentsModel().outdate_comments(
1048 1050 pull_request, old_diff_data=old_diff_data,
1049 1051 new_diff_data=new_diff_data)
1050 1052
1051 1053 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1052 1054 file_node_changes = (
1053 1055 file_changes.added or file_changes.modified or file_changes.removed)
1054 1056 pr_has_changes = valid_commit_changes or file_node_changes
1055 1057
1056 1058 # Add an automatic comment to the pull request, in case
1057 1059 # anything has changed
1058 1060 if pr_has_changes:
1059 1061 update_comment = CommentsModel().create(
1060 1062 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1061 1063 repo=pull_request.target_repo,
1062 1064 user=pull_request.author,
1063 1065 pull_request=pull_request,
1064 1066 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1065 1067
1066 1068 # Update status to "Under Review" for added commits
1067 1069 for commit_id in commit_changes.added:
1068 1070 ChangesetStatusModel().set_status(
1069 1071 repo=pull_request.source_repo,
1070 1072 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1071 1073 comment=update_comment,
1072 1074 user=pull_request.author,
1073 1075 pull_request=pull_request,
1074 1076 revision=commit_id)
1075 1077
1076 1078 # send update email to users
1077 1079 try:
1078 1080 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1079 1081 ancestor_commit_id=ancestor_commit_id,
1080 1082 commit_changes=commit_changes,
1081 1083 file_changes=file_changes)
1082 1084 except Exception:
1083 1085 log.exception('Failed to send email notification to users')
1084 1086
1085 1087 log.debug(
1086 1088 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1087 1089 'removed_ids: %s', pull_request.pull_request_id,
1088 1090 commit_changes.added, commit_changes.common, commit_changes.removed)
1089 1091 log.debug(
1090 1092 'Updated pull request with the following file changes: %s',
1091 1093 file_changes)
1092 1094
1093 1095 log.info(
1094 1096 "Updated pull request %s from commit %s to commit %s, "
1095 1097 "stored new version %s of this pull request.",
1096 1098 pull_request.pull_request_id, source_ref_id,
1097 1099 pull_request.source_ref_parts.commit_id,
1098 1100 pull_request_version.pull_request_version_id)
1099 1101 Session().commit()
1100 1102 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1101 1103
1102 1104 return UpdateResponse(
1103 1105 executed=True, reason=UpdateFailureReason.NONE,
1104 1106 old=pull_request, new=pull_request_version,
1105 1107 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1106 1108 source_changed=source_changed, target_changed=target_changed)
1107 1109
    def _create_version_from_snapshot(self, pull_request):
        """
        Snapshot the current pull request state into a new
        ``PullRequestVersion`` row and return it (flushed, so its primary
        key is assigned).
        """
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.pull_request_state = pull_request.pull_request_state
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        # merge-state bookkeeping is copied verbatim from the live PR
        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version.last_merge_status = pull_request.last_merge_status
        version.last_merge_metadata = pull_request.last_merge_metadata
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data

        version.revisions = pull_request.revisions
        version.common_ancestor_id = pull_request.common_ancestor_id
        version.pull_request = pull_request
        Session().add(version)
        # flush so callers can read the new version's id immediately
        Session().flush()

        return version
1137 1139
    def _generate_update_diffs(self, pull_request, pull_request_version):
        """
        Compute the diff of the previous version and the diff of the current
        pull request state.

        :returns: tuple of ``(old_diff_data, new_diff_data)`` as prepared
            ``diffs.DiffProcessor`` objects
        """

        # extra context lines so inline comments keep enough surrounding diff
        diff_context = (
            self.DIFF_CONTEXT +
            CommentsModel.needed_extra_diff_context())
        hide_whitespace_changes = False
        # diff of the snapshot version (pre-update state)
        source_repo = pull_request_version.source_repo
        source_ref_id = pull_request_version.source_ref_parts.commit_id
        target_ref_id = pull_request_version.target_ref_parts.commit_id
        old_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

        # diff of the updated pull request
        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        new_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data
1165 1167
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        Only comments whose ``pull_request_version`` is still ``None`` are
        touched; already-linked comments keep their version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
1191 1193
1192 1194 def _calculate_commit_id_changes(self, old_ids, new_ids):
1193 1195 added = [x for x in new_ids if x not in old_ids]
1194 1196 common = [x for x in new_ids if x in old_ids]
1195 1197 removed = [x for x in old_ids if x not in new_ids]
1196 1198 total = new_ids
1197 1199 return ChangeTuple(added, common, removed, total)
1198 1200
    def _calculate_file_changes(self, old_diff_data, new_diff_data):
        """
        Compare two prepared diffs and classify files as added, modified or
        removed. Files are matched by filename; per-file raw diffs are
        compared via md5 to detect modifications.

        :returns: ``FileChangeTuple(added, modified, removed)``
        """

        # filename -> md5 of its raw diff, taken from the OLD diff
        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in old diff, we have to figure out from parsed diff
                # operation ADD/REMOVE
                operations_dict = diff_data['stats']['ops']
                if diffs.DEL_FILENODE in operations_dict:
                    removed_files.append(new_filename)
                else:
                    added_files.append(new_filename)
            else:
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove a file from old, since we have seen it already
                del old_files[new_filename]

        # removed files is when there are present in old, but not in NEW,
        # since we remove old files that are present in new diff, left-overs
        # if any should be the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)
1233 1235
    def _render_update_message(self, ancestor_commit_id, changes, file_changes):
        """
        render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
        so it's always looking the same disregarding on which default
        renderer system is using.

        :param ancestor_commit_id: ancestor raw_id
        :param changes: changes named tuple
        :param file_changes: file changes named tuple

        """
        new_status = ChangesetStatus.get_status_lbl(
            ChangesetStatus.STATUS_UNDER_REVIEW)

        # flat list of every touched file, regardless of operation
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        params = {
            'under_review_label': new_status,
            'added_commits': changes.added,
            'removed_commits': changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            'ancestor_commit_id': ancestor_commit_id
        }
        renderer = RstTemplateRenderer()
        return renderer.render('pull_request_update.mako', **params)
1263 1265
    def edit(self, pull_request, title, description, description_renderer, user):
        """
        Edit the title/description of a pull request and write an audit entry.

        Note: description and description_renderer are always overwritten,
        while the title is only changed when a non-empty value is given.

        :raises ValueError: if the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        # capture pre-edit state for the audit log
        old_data = pull_request.get_api_data(with_merge_state=False)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        pull_request.description_renderer = description_renderer
        Session().add(pull_request)
        self._log_audit_action(
            'repo.pull_request.edit', {'old_data': old_data},
            user, pull_request)
1278 1280
1279 1281 def update_reviewers(self, pull_request, reviewer_data, user):
1280 1282 """
1281 1283 Update the reviewers in the pull request
1282 1284
1283 1285 :param pull_request: the pr to update
1284 1286 :param reviewer_data: list of tuples
1285 1287 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1286 1288 :param user: current use who triggers this action
1287 1289 """
1288 1290
1289 1291 pull_request = self.__get_pull_request(pull_request)
1290 1292 if pull_request.is_closed():
1291 1293 raise ValueError('This pull request is closed')
1292 1294
1293 1295 reviewers = {}
1294 1296 for user_id, reasons, mandatory, role, rules in reviewer_data:
1295 1297 if isinstance(user_id, (int, compat.string_types)):
1296 1298 user_id = self._get_user(user_id).user_id
1297 1299 reviewers[user_id] = {
1298 1300 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1299 1301
1300 1302 reviewers_ids = set(reviewers.keys())
1301 1303 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1302 1304 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1303 1305
1304 1306 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1305 1307
1306 1308 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1307 1309 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1308 1310
1309 1311 log.debug("Adding %s reviewers", ids_to_add)
1310 1312 log.debug("Removing %s reviewers", ids_to_remove)
1311 1313 changed = False
1312 1314 added_audit_reviewers = []
1313 1315 removed_audit_reviewers = []
1314 1316
1315 1317 for uid in ids_to_add:
1316 1318 changed = True
1317 1319 _usr = self._get_user(uid)
1318 1320 reviewer = PullRequestReviewers()
1319 1321 reviewer.user = _usr
1320 1322 reviewer.pull_request = pull_request
1321 1323 reviewer.reasons = reviewers[uid]['reasons']
1322 1324 # NOTE(marcink): mandatory shouldn't be changed now
1323 1325 # reviewer.mandatory = reviewers[uid]['reasons']
1324 1326 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1325 1327 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1326 1328 Session().add(reviewer)
1327 1329 added_audit_reviewers.append(reviewer.get_dict())
1328 1330
1329 1331 for uid in ids_to_remove:
1330 1332 changed = True
1331 1333 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1332 1334 # This is an edge case that handles previous state of having the same reviewer twice.
1333 1335 # this CAN happen due to the lack of DB checks
1334 1336 reviewers = PullRequestReviewers.query()\
1335 1337 .filter(PullRequestReviewers.user_id == uid,
1336 1338 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1337 1339 PullRequestReviewers.pull_request == pull_request)\
1338 1340 .all()
1339 1341
1340 1342 for obj in reviewers:
1341 1343 added_audit_reviewers.append(obj.get_dict())
1342 1344 Session().delete(obj)
1343 1345
1344 1346 if changed:
1345 1347 Session().expire_all()
1346 1348 pull_request.updated_on = datetime.datetime.now()
1347 1349 Session().add(pull_request)
1348 1350
1349 1351 # finally store audit logs
1350 1352 for user_data in added_audit_reviewers:
1351 1353 self._log_audit_action(
1352 1354 'repo.pull_request.reviewer.add', {'data': user_data},
1353 1355 user, pull_request)
1354 1356 for user_data in removed_audit_reviewers:
1355 1357 self._log_audit_action(
1356 1358 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1357 1359 user, pull_request)
1358 1360
1359 1361 self.notify_reviewers(pull_request, ids_to_add, user)
1360 1362 return ids_to_add, ids_to_remove
1361 1363
1362 1364 def update_observers(self, pull_request, observer_data, user):
1363 1365 """
1364 1366 Update the observers in the pull request
1365 1367
1366 1368 :param pull_request: the pr to update
1367 1369 :param observer_data: list of tuples
1368 1370 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1369 1371 :param user: current use who triggers this action
1370 1372 """
1371 1373 pull_request = self.__get_pull_request(pull_request)
1372 1374 if pull_request.is_closed():
1373 1375 raise ValueError('This pull request is closed')
1374 1376
1375 1377 observers = {}
1376 1378 for user_id, reasons, mandatory, role, rules in observer_data:
1377 1379 if isinstance(user_id, (int, compat.string_types)):
1378 1380 user_id = self._get_user(user_id).user_id
1379 1381 observers[user_id] = {
1380 1382 'reasons': reasons, 'observers': mandatory, 'role': role}
1381 1383
1382 1384 observers_ids = set(observers.keys())
1383 1385 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1384 1386 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1385 1387
1386 1388 current_observers_ids = set([x.user.user_id for x in current_observers])
1387 1389
1388 1390 ids_to_add = observers_ids.difference(current_observers_ids)
1389 1391 ids_to_remove = current_observers_ids.difference(observers_ids)
1390 1392
1391 1393 log.debug("Adding %s observer", ids_to_add)
1392 1394 log.debug("Removing %s observer", ids_to_remove)
1393 1395 changed = False
1394 1396 added_audit_observers = []
1395 1397 removed_audit_observers = []
1396 1398
1397 1399 for uid in ids_to_add:
1398 1400 changed = True
1399 1401 _usr = self._get_user(uid)
1400 1402 observer = PullRequestReviewers()
1401 1403 observer.user = _usr
1402 1404 observer.pull_request = pull_request
1403 1405 observer.reasons = observers[uid]['reasons']
1404 1406 # NOTE(marcink): mandatory shouldn't be changed now
1405 1407 # observer.mandatory = observer[uid]['reasons']
1406 1408
1407 1409 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1408 1410 observer.role = PullRequestReviewers.ROLE_OBSERVER
1409 1411 Session().add(observer)
1410 1412 added_audit_observers.append(observer.get_dict())
1411 1413
1412 1414 for uid in ids_to_remove:
1413 1415 changed = True
1414 1416 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1415 1417 # This is an edge case that handles previous state of having the same reviewer twice.
1416 1418 # this CAN happen due to the lack of DB checks
1417 1419 observers = PullRequestReviewers.query()\
1418 1420 .filter(PullRequestReviewers.user_id == uid,
1419 1421 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1420 1422 PullRequestReviewers.pull_request == pull_request)\
1421 1423 .all()
1422 1424
1423 1425 for obj in observers:
1424 1426 added_audit_observers.append(obj.get_dict())
1425 1427 Session().delete(obj)
1426 1428
1427 1429 if changed:
1428 1430 Session().expire_all()
1429 1431 pull_request.updated_on = datetime.datetime.now()
1430 1432 Session().add(pull_request)
1431 1433
1432 1434 # finally store audit logs
1433 1435 for user_data in added_audit_observers:
1434 1436 self._log_audit_action(
1435 1437 'repo.pull_request.observer.add', {'data': user_data},
1436 1438 user, pull_request)
1437 1439 for user_data in removed_audit_observers:
1438 1440 self._log_audit_action(
1439 1441 'repo.pull_request.observer.delete', {'old_data': user_data},
1440 1442 user, pull_request)
1441 1443
1442 1444 self.notify_observers(pull_request, ids_to_add, user)
1443 1445 return ids_to_add, ids_to_remove
1444 1446
1445 1447 def get_url(self, pull_request, request=None, permalink=False):
1446 1448 if not request:
1447 1449 request = get_current_request()
1448 1450
1449 1451 if permalink:
1450 1452 return request.route_url(
1451 1453 'pull_requests_global',
1452 1454 pull_request_id=pull_request.pull_request_id,)
1453 1455 else:
1454 1456 return request.route_url('pullrequest_show',
1455 1457 repo_name=safe_str(pull_request.target_repo.repo_name),
1456 1458 pull_request_id=pull_request.pull_request_id,)
1457 1459
1458 1460 def get_shadow_clone_url(self, pull_request, request=None):
1459 1461 """
1460 1462 Returns qualified url pointing to the shadow repository. If this pull
1461 1463 request is closed there is no shadow repository and ``None`` will be
1462 1464 returned.
1463 1465 """
1464 1466 if pull_request.is_closed():
1465 1467 return None
1466 1468 else:
1467 1469 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1468 1470 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1469 1471
    def _notify_reviewers(self, pull_request, user_ids, role, user):
        """
        Create in-app notifications (and emails) about a pull request for the
        given user ids, labelled with their role (reviewer/observer).

        No-op when ``user_ids`` is empty.

        :param user: the acting user, recorded as the notification creator
        """
        # notification to reviewers/observers
        if not user_ids:
            return

        log.debug('Notify following %s users about pull-request %s', role, user_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = user_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        current_rhodecode_user = user
        # template kwargs consumed by the email/notification renderers
        kwargs = {
            'user': current_rhodecode_user,
            'pull_request_author': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
            'thread_ids': [pr_url],
            'user_role': role
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=current_rhodecode_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1528 1530
    def notify_reviewers(self, pull_request, reviewers_ids, user):
        # thin delegate: notify the given users in the REVIEWER role
        return self._notify_reviewers(pull_request, reviewers_ids,
                                      PullRequestReviewers.ROLE_REVIEWER, user)
1532 1534
    def notify_observers(self, pull_request, observers_ids, user):
        # thin delegate: notify the given users in the OBSERVER role
        return self._notify_reviewers(pull_request, observers_ids,
                                      PullRequestReviewers.ROLE_OBSERVER, user)
1536 1538
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):
        """
        Notify all reviewers/observers of a pull request (except the user who
        performed the update) that the pull request was updated.

        :param pull_request: the updated pull request
        :param updating_user: the user performing the update; excluded from
            the recipient set
        :param ancestor_commit_id: common ancestor commit id after the update
        :param commit_changes: ChangeTuple with added/removed commit ids
        :param file_changes: FileChangeTuple with added/modified/removed files
        """
        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # NOTE(review): these keys feed the pull-request-update email
        # template — presumably the template expects exactly these names;
        # keep them in sync when editing.
        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            # one email thread per pull-request URL
            'thread_ids': [pr_url],
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )
1601 1603
1602 1604 def delete(self, pull_request, user=None):
1603 1605 if not user:
1604 1606 user = getattr(get_current_rhodecode_user(), 'username', None)
1605 1607
1606 1608 pull_request = self.__get_pull_request(pull_request)
1607 1609 old_data = pull_request.get_api_data(with_merge_state=False)
1608 1610 self._cleanup_merge_workspace(pull_request)
1609 1611 self._log_audit_action(
1610 1612 'repo.pull_request.delete', {'old_data': old_data},
1611 1613 user, pull_request)
1612 1614 Session().delete(pull_request)
1613 1615
1614 1616 def close_pull_request(self, pull_request, user):
1615 1617 pull_request = self.__get_pull_request(pull_request)
1616 1618 self._cleanup_merge_workspace(pull_request)
1617 1619 pull_request.status = PullRequest.STATUS_CLOSED
1618 1620 pull_request.updated_on = datetime.datetime.now()
1619 1621 Session().add(pull_request)
1620 1622 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1621 1623
1622 1624 pr_data = pull_request.get_api_data(with_merge_state=False)
1623 1625 self._log_audit_action(
1624 1626 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1625 1627
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close a pull request while leaving a status-changing comment.

        The resulting changeset status is APPROVED only when the calculated
        review status is already approved; otherwise it is REJECTED.

        :param message: optional custom comment text; a default
            'Closing with status change ...' message is used when empty
        :returns: tuple of (comment, status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        # flush so the new comment/status are visible to the hooks below
        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
1683 1685
1684 1686 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1685 1687 _ = translator or get_current_request().translate
1686 1688
1687 1689 if not self._is_merge_enabled(pull_request):
1688 1690 return None, False, _('Server-side pull request merging is disabled.')
1689 1691
1690 1692 if pull_request.is_closed():
1691 1693 return None, False, _('This pull request is closed.')
1692 1694
1693 1695 merge_possible, msg = self._check_repo_requirements(
1694 1696 target=pull_request.target_repo, source=pull_request.source_repo,
1695 1697 translator=_)
1696 1698 if not merge_possible:
1697 1699 return None, merge_possible, msg
1698 1700
1699 1701 try:
1700 1702 merge_response = self._try_merge(
1701 1703 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1702 1704 log.debug("Merge response: %s", merge_response)
1703 1705 return merge_response, merge_response.possible, merge_response.merge_status_message
1704 1706 except NotImplementedError:
1705 1707 return None, False, _('Pull request merging is not supported.')
1706 1708
1707 1709 def _check_repo_requirements(self, target, source, translator):
1708 1710 """
1709 1711 Check if `target` and `source` have compatible requirements.
1710 1712
1711 1713 Currently this is just checking for largefiles.
1712 1714 """
1713 1715 _ = translator
1714 1716 target_has_largefiles = self._has_largefiles(target)
1715 1717 source_has_largefiles = self._has_largefiles(source)
1716 1718 merge_possible = True
1717 1719 message = u''
1718 1720
1719 1721 if target_has_largefiles != source_has_largefiles:
1720 1722 merge_possible = False
1721 1723 if source_has_largefiles:
1722 1724 message = _(
1723 1725 'Target repository large files support is disabled.')
1724 1726 else:
1725 1727 message = _(
1726 1728 'Source repository large files support is disabled.')
1727 1729
1728 1730 return merge_possible, message
1729 1731
1730 1732 def _has_largefiles(self, repo):
1731 1733 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1732 1734 'extensions', 'largefiles')
1733 1735 return largefiles_ui and largefiles_ui[0].active
1734 1736
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        Performs a dry-run merge via the shadow repository unless a cached
        merge state on the pull request is still valid.

        :returns: a MergeResponse
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            # target ref vanished (e.g. branch deleted) -> cannot merge
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # reuse the merge state cached on the pull request record
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if pull_request.last_merge_metadata:
                metadata.update(pull_request.last_merge_metadata_parsed)

            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    # NOTE(review): joining with '\n,' yields "a\n,b";
                    # possibly ',\n' was intended — confirm against the
                    # template that renders 'heads' before changing.
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })

            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1788 1790
1789 1791 def _refresh_reference(self, reference, vcs_repository):
1790 1792 if reference.type in self.UPDATABLE_REF_TYPES:
1791 1793 name_or_id = reference.name
1792 1794 else:
1793 1795 name_or_id = reference.commit_id
1794 1796
1795 1797 refreshed_commit = vcs_repository.get_commit(name_or_id)
1796 1798 refreshed_reference = Reference(
1797 1799 reference.type, reference.name, refreshed_commit.raw_id)
1798 1800 return refreshed_reference
1799 1801
1800 1802 def _needs_merge_state_refresh(self, pull_request, target_reference):
1801 1803 return not(
1802 1804 pull_request.revisions and
1803 1805 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1804 1806 target_reference.commit_id == pull_request._last_merge_target_rev)
1805 1807
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow repository and cache the outcome
        (last merge revs/status/metadata and shadow merge ref) on the pull
        request record.  Commits the session when the result is cacheable.

        :returns: the MergeResponse from the dry-run merge
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1831 1833
1832 1834 def _workspace_id(self, pull_request):
1833 1835 workspace_id = 'pr-%s' % pull_request.pull_request_id
1834 1836 return workspace_id
1835 1837
1836 1838 def generate_repo_data(self, repo, commit_id=None, branch=None,
1837 1839 bookmark=None, translator=None):
1838 1840 from rhodecode.model.repo import RepoModel
1839 1841
1840 1842 all_refs, selected_ref = \
1841 1843 self._get_repo_pullrequest_sources(
1842 1844 repo.scm_instance(), commit_id=commit_id,
1843 1845 branch=branch, bookmark=bookmark, translator=translator)
1844 1846
1845 1847 refs_select2 = []
1846 1848 for element in all_refs:
1847 1849 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1848 1850 refs_select2.append({'text': element[1], 'children': children})
1849 1851
1850 1852 return {
1851 1853 'user': {
1852 1854 'user_id': repo.user.user_id,
1853 1855 'username': repo.user.username,
1854 1856 'firstname': repo.user.first_name,
1855 1857 'lastname': repo.user.last_name,
1856 1858 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1857 1859 },
1858 1860 'name': repo.repo_name,
1859 1861 'link': RepoModel().get_url(repo),
1860 1862 'description': h.chop_at_smart(repo.description_safe, '\n'),
1861 1863 'refs': {
1862 1864 'all_refs': all_refs,
1863 1865 'selected_ref': selected_ref,
1864 1866 'select2_refs': refs_select2
1865 1867 }
1866 1868 }
1867 1869
1868 1870 def generate_pullrequest_title(self, source, source_ref, target):
1869 1871 return u'{source}#{at_ref} to {target}'.format(
1870 1872 source=source,
1871 1873 at_ref=source_ref,
1872 1874 target=target,
1873 1875 )
1874 1876
1875 1877 def _cleanup_merge_workspace(self, pull_request):
1876 1878 # Merging related cleanup
1877 1879 repo_id = pull_request.target_repo.repo_id
1878 1880 target_scm = pull_request.target_repo.scm_instance()
1879 1881 workspace_id = self._workspace_id(pull_request)
1880 1882
1881 1883 try:
1882 1884 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1883 1885 except NotImplementedError:
1884 1886 pass
1885 1887
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :returns: tuple of (groups, selected) where groups is a list of
            ((ref_key, ref_name) list, group label) pairs
        :raises CommitDoesNotExistError: when a requested ref cannot be found
        :raises EmptyRepositoryError: when the repo has no commits at all
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # ref_key encodes type:name:commit_id for the selector value
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # select the first ref whose id or name matches the
                    # requested commit_id / branch / bookmark
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # nothing was requested: fall back to the default branch tip
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1951 1953
1952 1954 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1953 1955 hide_whitespace_changes, diff_context):
1954 1956
1955 1957 return self._get_diff_from_pr_or_version(
1956 1958 source_repo, source_ref_id, target_ref_id,
1957 1959 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1958 1960
1959 1961 def _get_diff_from_pr_or_version(
1960 1962 self, source_repo, source_ref_id, target_ref_id,
1961 1963 hide_whitespace_changes, diff_context):
1962 1964
1963 1965 target_commit = source_repo.get_commit(
1964 1966 commit_id=safe_str(target_ref_id))
1965 1967 source_commit = source_repo.get_commit(
1966 1968 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
1967 1969 if isinstance(source_repo, Repository):
1968 1970 vcs_repo = source_repo.scm_instance()
1969 1971 else:
1970 1972 vcs_repo = source_repo
1971 1973
1972 1974 # TODO: johbo: In the context of an update, we cannot reach
1973 1975 # the old commit anymore with our normal mechanisms. It needs
1974 1976 # some sort of special support in the vcs layer to avoid this
1975 1977 # workaround.
1976 1978 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1977 1979 vcs_repo.alias == 'git'):
1978 1980 source_commit.raw_id = safe_str(source_ref_id)
1979 1981
1980 1982 log.debug('calculating diff between '
1981 1983 'source_ref:%s and target_ref:%s for repo `%s`',
1982 1984 target_ref_id, source_ref_id,
1983 1985 safe_unicode(vcs_repo.path))
1984 1986
1985 1987 vcs_diff = vcs_repo.get_diff(
1986 1988 commit1=target_commit, commit2=source_commit,
1987 1989 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1988 1990 return vcs_diff
1989 1991
1990 1992 def _is_merge_enabled(self, pull_request):
1991 1993 return self._get_general_setting(
1992 1994 pull_request, 'rhodecode_pr_merge_enabled')
1993 1995
1994 1996 def _use_rebase_for_merging(self, pull_request):
1995 1997 repo_type = pull_request.target_repo.repo_type
1996 1998 if repo_type == 'hg':
1997 1999 return self._get_general_setting(
1998 2000 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1999 2001 elif repo_type == 'git':
2000 2002 return self._get_general_setting(
2001 2003 pull_request, 'rhodecode_git_use_rebase_for_merging')
2002 2004
2003 2005 return False
2004 2006
2005 2007 def _user_name_for_merging(self, pull_request, user):
2006 2008 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2007 2009 if env_user_name_attr and hasattr(user, env_user_name_attr):
2008 2010 user_name_attr = env_user_name_attr
2009 2011 else:
2010 2012 user_name_attr = 'short_contact'
2011 2013
2012 2014 user_name = getattr(user, user_name_attr)
2013 2015 return user_name
2014 2016
2015 2017 def _close_branch_before_merging(self, pull_request):
2016 2018 repo_type = pull_request.target_repo.repo_type
2017 2019 if repo_type == 'hg':
2018 2020 return self._get_general_setting(
2019 2021 pull_request, 'rhodecode_hg_close_branch_before_merging')
2020 2022 elif repo_type == 'git':
2021 2023 return self._get_general_setting(
2022 2024 pull_request, 'rhodecode_git_close_branch_before_merging')
2023 2025
2024 2026 return False
2025 2027
2026 2028 def _get_general_setting(self, pull_request, settings_key, default=False):
2027 2029 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2028 2030 settings = settings_model.get_general_settings()
2029 2031 return settings.get(settings_key, default)
2030 2032
2031 2033 def _log_audit_action(self, action, action_data, user, pull_request):
2032 2034 audit_logger.store(
2033 2035 action=action,
2034 2036 action_data=action_data,
2035 2037 user=user,
2036 2038 repo=pull_request.target_repo)
2037 2039
    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, else we fallback to CE
        package functions

        :returns: tuple of (get_default_reviewers_data,
            validate_default_reviewers, validate_observers)
        """
        try:
            # EE implementation, present only in Enterprise installs
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
            from rc_reviewers.utils import validate_observers
        except ImportError:
            # CE fallback shipped with this codebase
            from rhodecode.apps.repository.utils import get_default_reviewers_data
            from rhodecode.apps.repository.utils import validate_default_reviewers
            from rhodecode.apps.repository.utils import validate_observers

        return get_default_reviewers_data, validate_default_reviewers, validate_observers
2054 2056
2055 2057
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys identifying individual checks in `error_details`
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'
    WIP_CHECK = 'wip'

    def __init__(self):
        # all fields below are populated by validate()
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.merge_response = None
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()
        self.source_commit = AttributeDict()
        self.target_commit = AttributeDict()
        self.reviewers_count = 0
        self.observers_count = 0

    def __repr__(self):
        return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
            self.merge_possible, self.failed, self.errors)

    def push_error(self, error_type, message, error_key, details):
        """
        Record a failed check.

        :param error_type: severity, 'error' or 'warning'
        :param error_key: one of the *_CHECK class constants
        :param details: check-specific payload stored for the UI
        """
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run all merge checks (WIP marker, merge permission, target-branch
        rule, review status, unresolved TODOs, dry-run mergeability) and
        return a populated MergeCheck.

        :param fail_early: return immediately after the first failed check
        :param force_shadow_repo_refresh: force re-running the shadow-repo
            merge simulation instead of using cached merge state
        """
        _ = translator
        merge_check = cls()

        # title has WIP:
        if pull_request.work_in_progress:
            log.debug("MergeCheck: cannot merge, title has wip: marker.")

            msg = _('WIP marker in title prevents from accidental merge.')
            merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
            if fail_early:
                return merge_check

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
        if not user_allowed_to_merge:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status
        merge_check.reviewers_count = pull_request.reviewers_count
        merge_check.observers_count = pull_request.observers_count

        # approval is only required when the PR actually has reviewers
        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved and merge_check.reviewers_count:
            log.debug("MergeCheck: cannot merge, approval is pending.")
            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_response, merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)

        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        merge_check.merge_response = merge_response

        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        try:
            # record whether the source/target refs moved since PR creation
            source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
            merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
            merge_check.source_commit.ref_spec = pull_request.source_ref_parts
            merge_check.source_commit.current_raw_id = source_commit.raw_id
            merge_check.source_commit.previous_raw_id = source_ref_id

            merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
            merge_check.target_commit.ref_spec = pull_request.target_ref_parts
            merge_check.target_commit.current_raw_id = target_commit.raw_id
            merge_check.target_commit.previous_raw_id = target_ref_id
        except (SourceRefMissing, TargetRefMissing):
            # a ref may be gone (e.g. source branch deleted); the commit
            # info simply stays unpopulated in that case
            pass

        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Describe how the merge will be performed (merge strategy, branch
        closing) for display purposes.

        :returns: dict of condition name -> {'details': ..., 'message': ...}
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed before the merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after the merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
2241 2243
2242 2244
# Summary of commit-level changes between two pull-request versions:
# lists of added/common/removed commit ids, plus the total count.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', 'added common removed total')

# Summary of file-level changes between two pull-request versions.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', 'added modified removed')
@@ -1,1172 +1,1183 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import shutil
24 24 import time
25 25 import logging
26 26 import traceback
27 27 import datetime
28 28
29 29 from pyramid.threadlocal import get_current_request
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode import events
33 33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 34 from rhodecode.lib.caching_query import FromCache
35 35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 36 from rhodecode.lib import hooks_base
37 37 from rhodecode.lib.user_log_filter import user_log_filter
38 38 from rhodecode.lib.utils import make_db_config
39 39 from rhodecode.lib.utils2 import (
40 40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 41 get_current_rhodecode_user, safe_int, action_logger_generic)
42 42 from rhodecode.lib.vcs.backends import get_backend
43 43 from rhodecode.model import BaseModel
44 44 from rhodecode.model.db import (
45 45 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 from rhodecode.model.permission import PermissionModel
49 50 from rhodecode.model.settings import VcsSettingsModel
50 51
51 52 log = logging.getLogger(__name__)
52 53
53 54
54 55 class RepoModel(BaseModel):
55 56
56 57 cls = Repository
57 58
58 59 def _get_user_group(self, users_group):
59 60 return self._get_instance(UserGroup, users_group,
60 61 callback=UserGroup.get_by_group_name)
61 62
62 63 def _get_repo_group(self, repo_group):
63 64 return self._get_instance(RepoGroup, repo_group,
64 65 callback=RepoGroup.get_by_group_name)
65 66
66 67 def _create_default_perms(self, repository, private):
67 68 # create default permission
68 69 default = 'repository.read'
69 70 def_user = User.get_default_user()
70 71 for p in def_user.user_perms:
71 72 if p.permission.permission_name.startswith('repository.'):
72 73 default = p.permission.permission_name
73 74 break
74 75
75 76 default_perm = 'repository.none' if private else default
76 77
77 78 repo_to_perm = UserRepoToPerm()
78 79 repo_to_perm.permission = Permission.get_by_key(default_perm)
79 80
80 81 repo_to_perm.repository = repository
81 82 repo_to_perm.user_id = def_user.user_id
82 83
83 84 return repo_to_perm
84 85
85 86 @LazyProperty
86 87 def repos_path(self):
87 88 """
88 89 Gets the repositories root path from database
89 90 """
90 91 settings_model = VcsSettingsModel(sa=self.sa)
91 92 return settings_model.get_repos_location()
92 93
93 94 def get(self, repo_id):
94 95 repo = self.sa.query(Repository) \
95 96 .filter(Repository.repo_id == repo_id)
96 97
97 98 return repo.scalar()
98 99
99 100 def get_repo(self, repository):
100 101 return self._get_repo(repository)
101 102
102 103 def get_by_repo_name(self, repo_name, cache=False):
103 104 repo = self.sa.query(Repository) \
104 105 .filter(Repository.repo_name == repo_name)
105 106
106 107 if cache:
107 108 name_key = _hash_key(repo_name)
108 109 repo = repo.options(
109 110 FromCache("sql_cache_short", "get_repo_%s" % name_key))
110 111 return repo.scalar()
111 112
112 113 def _extract_id_from_repo_name(self, repo_name):
113 114 if repo_name.startswith('/'):
114 115 repo_name = repo_name.lstrip('/')
115 116 by_id_match = re.match(r'^_(\d{1,})', repo_name)
116 117 if by_id_match:
117 118 return by_id_match.groups()[0]
118 119
119 120 def get_repo_by_id(self, repo_name):
120 121 """
121 122 Extracts repo_name by id from special urls.
122 123 Example url is _11/repo_name
123 124
124 125 :param repo_name:
125 126 :return: repo object if matched else None
126 127 """
127 128
128 129 try:
129 130 _repo_id = self._extract_id_from_repo_name(repo_name)
130 131 if _repo_id:
131 132 return self.get(_repo_id)
132 133 except Exception:
133 134 log.exception('Failed to extract repo_name from URL')
134 135
135 136 return None
136 137
137 138 def get_repos_for_root(self, root, traverse=False):
138 139 if traverse:
139 140 like_expression = u'{}%'.format(safe_unicode(root))
140 141 repos = Repository.query().filter(
141 142 Repository.repo_name.like(like_expression)).all()
142 143 else:
143 144 if root and not isinstance(root, RepoGroup):
144 145 raise ValueError(
145 146 'Root must be an instance '
146 147 'of RepoGroup, got:{} instead'.format(type(root)))
147 148 repos = Repository.query().filter(Repository.group == root).all()
148 149 return repos
149 150
150 151 def get_url(self, repo, request=None, permalink=False):
151 152 if not request:
152 153 request = get_current_request()
153 154
154 155 if not request:
155 156 return
156 157
157 158 if permalink:
158 159 return request.route_url(
159 160 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
160 161 else:
161 162 return request.route_url(
162 163 'repo_summary', repo_name=safe_str(repo.repo_name))
163 164
164 165 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
165 166 if not request:
166 167 request = get_current_request()
167 168
168 169 if not request:
169 170 return
170 171
171 172 if permalink:
172 173 return request.route_url(
173 174 'repo_commit', repo_name=safe_str(repo.repo_id),
174 175 commit_id=commit_id)
175 176
176 177 else:
177 178 return request.route_url(
178 179 'repo_commit', repo_name=safe_str(repo.repo_name),
179 180 commit_id=commit_id)
180 181
181 182 def get_repo_log(self, repo, filter_term):
182 183 repo_log = UserLog.query()\
183 184 .filter(or_(UserLog.repository_id == repo.repo_id,
184 185 UserLog.repository_name == repo.repo_name))\
185 186 .options(joinedload(UserLog.user))\
186 187 .options(joinedload(UserLog.repository))\
187 188 .order_by(UserLog.action_date.desc())
188 189
189 190 repo_log = user_log_filter(repo_log, filter_term)
190 191 return repo_log
191 192
192 193 @classmethod
193 194 def update_commit_cache(cls, repositories=None):
194 195 if not repositories:
195 196 repositories = Repository.getAll()
196 197 for repo in repositories:
197 198 repo.update_commit_cache()
198 199
199 200 def get_repos_as_dict(self, repo_list=None, admin=False,
200 201 super_user_actions=False, short_name=None):
201 202
202 203 _render = get_current_request().get_partial_renderer(
203 204 'rhodecode:templates/data_table/_dt_elements.mako')
204 205 c = _render.get_call_context()
205 206 h = _render.get_helpers()
206 207
207 208 def quick_menu(repo_name):
208 209 return _render('quick_menu', repo_name)
209 210
210 211 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
211 212 if short_name is not None:
212 213 short_name_var = short_name
213 214 else:
214 215 short_name_var = not admin
215 216 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
216 217 short_name=short_name_var, admin=False)
217 218
218 219 def last_change(last_change):
219 220 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
220 221 ts = time.time()
221 222 utc_offset = (datetime.datetime.fromtimestamp(ts)
222 223 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
223 224 last_change = last_change + datetime.timedelta(seconds=utc_offset)
224 225
225 226 return _render("last_change", last_change)
226 227
227 228 def rss_lnk(repo_name):
228 229 return _render("rss", repo_name)
229 230
230 231 def atom_lnk(repo_name):
231 232 return _render("atom", repo_name)
232 233
233 234 def last_rev(repo_name, cs_cache):
234 235 return _render('revision', repo_name, cs_cache.get('revision'),
235 236 cs_cache.get('raw_id'), cs_cache.get('author'),
236 237 cs_cache.get('message'), cs_cache.get('date'))
237 238
238 239 def desc(desc):
239 240 return _render('repo_desc', desc, c.visual.stylify_metatags)
240 241
241 242 def state(repo_state):
242 243 return _render("repo_state", repo_state)
243 244
244 245 def repo_actions(repo_name):
245 246 return _render('repo_actions', repo_name, super_user_actions)
246 247
247 248 def user_profile(username):
248 249 return _render('user_profile', username)
249 250
250 251 repos_data = []
251 252 for repo in repo_list:
252 253 # NOTE(marcink): because we use only raw column we need to load it like that
253 254 changeset_cache = Repository._load_changeset_cache(
254 255 repo.repo_id, repo._changeset_cache)
255 256
256 257 row = {
257 258 "menu": quick_menu(repo.repo_name),
258 259
259 260 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
260 261 repo.private, repo.archived, repo.fork),
261 262
262 263 "desc": desc(h.escape(repo.description)),
263 264
264 265 "last_change": last_change(repo.updated_on),
265 266
266 267 "last_changeset": last_rev(repo.repo_name, changeset_cache),
267 268 "last_changeset_raw": changeset_cache.get('revision'),
268 269
269 270 "owner": user_profile(repo.User.username),
270 271
271 272 "state": state(repo.repo_state),
272 273 "rss": rss_lnk(repo.repo_name),
273 274 "atom": atom_lnk(repo.repo_name),
274 275 }
275 276 if admin:
276 277 row.update({
277 278 "action": repo_actions(repo.repo_name),
278 279 })
279 280 repos_data.append(row)
280 281
281 282 return repos_data
282 283
283 284 def get_repos_data_table(
284 285 self, draw, start, limit,
285 286 search_q, order_by, order_dir,
286 287 auth_user, repo_group_id):
287 288 from rhodecode.model.scm import RepoList
288 289
289 290 _perms = ['repository.read', 'repository.write', 'repository.admin']
290 291
291 292 repos = Repository.query() \
292 293 .filter(Repository.group_id == repo_group_id) \
293 294 .all()
294 295 auth_repo_list = RepoList(
295 296 repos, perm_set=_perms,
296 297 extra_kwargs=dict(user=auth_user))
297 298
298 299 allowed_ids = [-1]
299 300 for repo in auth_repo_list:
300 301 allowed_ids.append(repo.repo_id)
301 302
302 303 repos_data_total_count = Repository.query() \
303 304 .filter(Repository.group_id == repo_group_id) \
304 305 .filter(or_(
305 306 # generate multiple IN to fix limitation problems
306 307 *in_filter_generator(Repository.repo_id, allowed_ids))
307 308 ) \
308 309 .count()
309 310
310 311 base_q = Session.query(
311 312 Repository.repo_id,
312 313 Repository.repo_name,
313 314 Repository.description,
314 315 Repository.repo_type,
315 316 Repository.repo_state,
316 317 Repository.private,
317 318 Repository.archived,
318 319 Repository.fork,
319 320 Repository.updated_on,
320 321 Repository._changeset_cache,
321 322 User,
322 323 ) \
323 324 .filter(Repository.group_id == repo_group_id) \
324 325 .filter(or_(
325 326 # generate multiple IN to fix limitation problems
326 327 *in_filter_generator(Repository.repo_id, allowed_ids))
327 328 ) \
328 329 .join(User, User.user_id == Repository.user_id) \
329 330 .group_by(Repository, User)
330 331
331 332 repos_data_total_filtered_count = base_q.count()
332 333
333 334 sort_defined = False
334 335 if order_by == 'repo_name':
335 336 sort_col = func.lower(Repository.repo_name)
336 337 sort_defined = True
337 338 elif order_by == 'user_username':
338 339 sort_col = User.username
339 340 else:
340 341 sort_col = getattr(Repository, order_by, None)
341 342
342 343 if sort_defined or sort_col:
343 344 if order_dir == 'asc':
344 345 sort_col = sort_col.asc()
345 346 else:
346 347 sort_col = sort_col.desc()
347 348
348 349 base_q = base_q.order_by(sort_col)
349 350 base_q = base_q.offset(start).limit(limit)
350 351
351 352 repos_list = base_q.all()
352 353
353 354 repos_data = RepoModel().get_repos_as_dict(
354 355 repo_list=repos_list, admin=False)
355 356
356 357 data = ({
357 358 'draw': draw,
358 359 'data': repos_data,
359 360 'recordsTotal': repos_data_total_count,
360 361 'recordsFiltered': repos_data_total_filtered_count,
361 362 })
362 363 return data
363 364
364 365 def _get_defaults(self, repo_name):
365 366 """
366 367 Gets information about repository, and returns a dict for
367 368 usage in forms
368 369
369 370 :param repo_name:
370 371 """
371 372
372 373 repo_info = Repository.get_by_repo_name(repo_name)
373 374
374 375 if repo_info is None:
375 376 return None
376 377
377 378 defaults = repo_info.get_dict()
378 379 defaults['repo_name'] = repo_info.just_name
379 380
380 381 groups = repo_info.groups_with_parents
381 382 parent_group = groups[-1] if groups else None
382 383
383 384 # we use -1 as this is how in HTML, we mark an empty group
384 385 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
385 386
386 387 keys_to_process = (
387 388 {'k': 'repo_type', 'strip': False},
388 389 {'k': 'repo_enable_downloads', 'strip': True},
389 390 {'k': 'repo_description', 'strip': True},
390 391 {'k': 'repo_enable_locking', 'strip': True},
391 392 {'k': 'repo_landing_rev', 'strip': True},
392 393 {'k': 'clone_uri', 'strip': False},
393 394 {'k': 'push_uri', 'strip': False},
394 395 {'k': 'repo_private', 'strip': True},
395 396 {'k': 'repo_enable_statistics', 'strip': True}
396 397 )
397 398
398 399 for item in keys_to_process:
399 400 attr = item['k']
400 401 if item['strip']:
401 402 attr = remove_prefix(item['k'], 'repo_')
402 403
403 404 val = defaults[attr]
404 405 if item['k'] == 'repo_landing_rev':
405 406 val = ':'.join(defaults[attr])
406 407 defaults[item['k']] = val
407 408 if item['k'] == 'clone_uri':
408 409 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
409 410 if item['k'] == 'push_uri':
410 411 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
411 412
412 413 # fill owner
413 414 if repo_info.user:
414 415 defaults.update({'user': repo_info.user.username})
415 416 else:
416 417 replacement_user = User.get_first_super_admin().username
417 418 defaults.update({'user': replacement_user})
418 419
419 420 return defaults
420 421
421 422 def update(self, repo, **kwargs):
422 423 try:
423 424 cur_repo = self._get_repo(repo)
424 425 source_repo_name = cur_repo.repo_name
426
427 affected_user_ids = []
425 428 if 'user' in kwargs:
426 cur_repo.user = User.get_by_username(kwargs['user'])
429 old_owner_id = cur_repo.user.user_id
430 new_owner = User.get_by_username(kwargs['user'])
431 cur_repo.user = new_owner
432
433 if old_owner_id != new_owner.user_id:
434 affected_user_ids = [new_owner.user_id, old_owner_id]
427 435
428 436 if 'repo_group' in kwargs:
429 437 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
430 438 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
431 439
432 440 update_keys = [
433 441 (1, 'repo_description'),
434 442 (1, 'repo_landing_rev'),
435 443 (1, 'repo_private'),
436 444 (1, 'repo_enable_downloads'),
437 445 (1, 'repo_enable_locking'),
438 446 (1, 'repo_enable_statistics'),
439 447 (0, 'clone_uri'),
440 448 (0, 'push_uri'),
441 449 (0, 'fork_id')
442 450 ]
443 451 for strip, k in update_keys:
444 452 if k in kwargs:
445 453 val = kwargs[k]
446 454 if strip:
447 455 k = remove_prefix(k, 'repo_')
448 456
449 457 setattr(cur_repo, k, val)
450 458
451 459 new_name = cur_repo.get_new_name(kwargs['repo_name'])
452 460 cur_repo.repo_name = new_name
453 461
454 462 # if private flag is set, reset default permission to NONE
455 463 if kwargs.get('repo_private'):
456 464 EMPTY_PERM = 'repository.none'
457 465 RepoModel().grant_user_permission(
458 466 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
459 467 )
460 468
461 469 # handle extra fields
462 470 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
463 471 k = RepositoryField.un_prefix_key(field)
464 472 ex_field = RepositoryField.get_by_key_name(
465 473 key=k, repo=cur_repo)
466 474 if ex_field:
467 475 ex_field.field_value = kwargs[field]
468 476 self.sa.add(ex_field)
469 477
470 478 self.sa.add(cur_repo)
471 479
472 480 if source_repo_name != new_name:
473 481 # rename repository
474 482 self._rename_filesystem_repo(
475 483 old=source_repo_name, new=new_name)
476 484
485 if affected_user_ids:
486 PermissionModel().trigger_permission_flush(affected_user_ids)
487
477 488 return cur_repo
478 489 except Exception:
479 490 log.error(traceback.format_exc())
480 491 raise
481 492
482 493 def _create_repo(self, repo_name, repo_type, description, owner,
483 494 private=False, clone_uri=None, repo_group=None,
484 495 landing_rev='rev:tip', fork_of=None,
485 496 copy_fork_permissions=False, enable_statistics=False,
486 497 enable_locking=False, enable_downloads=False,
487 498 copy_group_permissions=False,
488 499 state=Repository.STATE_PENDING):
489 500 """
490 501 Create repository inside database with PENDING state, this should be
491 502 only executed by create() repo. With exception of importing existing
492 503 repos
493 504 """
494 505 from rhodecode.model.scm import ScmModel
495 506
496 507 owner = self._get_user(owner)
497 508 fork_of = self._get_repo(fork_of)
498 509 repo_group = self._get_repo_group(safe_int(repo_group))
499 510
500 511 try:
501 512 repo_name = safe_unicode(repo_name)
502 513 description = safe_unicode(description)
503 514 # repo name is just a name of repository
504 515 # while repo_name_full is a full qualified name that is combined
505 516 # with name and path of group
506 517 repo_name_full = repo_name
507 518 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
508 519
509 520 new_repo = Repository()
510 521 new_repo.repo_state = state
511 522 new_repo.enable_statistics = False
512 523 new_repo.repo_name = repo_name_full
513 524 new_repo.repo_type = repo_type
514 525 new_repo.user = owner
515 526 new_repo.group = repo_group
516 527 new_repo.description = description or repo_name
517 528 new_repo.private = private
518 529 new_repo.archived = False
519 530 new_repo.clone_uri = clone_uri
520 531 new_repo.landing_rev = landing_rev
521 532
522 533 new_repo.enable_statistics = enable_statistics
523 534 new_repo.enable_locking = enable_locking
524 535 new_repo.enable_downloads = enable_downloads
525 536
526 537 if repo_group:
527 538 new_repo.enable_locking = repo_group.enable_locking
528 539
529 540 if fork_of:
530 541 parent_repo = fork_of
531 542 new_repo.fork = parent_repo
532 543
533 544 events.trigger(events.RepoPreCreateEvent(new_repo))
534 545
535 546 self.sa.add(new_repo)
536 547
537 548 EMPTY_PERM = 'repository.none'
538 549 if fork_of and copy_fork_permissions:
539 550 repo = fork_of
540 551 user_perms = UserRepoToPerm.query() \
541 552 .filter(UserRepoToPerm.repository == repo).all()
542 553 group_perms = UserGroupRepoToPerm.query() \
543 554 .filter(UserGroupRepoToPerm.repository == repo).all()
544 555
545 556 for perm in user_perms:
546 557 UserRepoToPerm.create(
547 558 perm.user, new_repo, perm.permission)
548 559
549 560 for perm in group_perms:
550 561 UserGroupRepoToPerm.create(
551 562 perm.users_group, new_repo, perm.permission)
552 563 # in case we copy permissions and also set this repo to private
553 564 # override the default user permission to make it a private repo
554 565 if private:
555 566 RepoModel(self.sa).grant_user_permission(
556 567 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
557 568
558 569 elif repo_group and copy_group_permissions:
559 570 user_perms = UserRepoGroupToPerm.query() \
560 571 .filter(UserRepoGroupToPerm.group == repo_group).all()
561 572
562 573 group_perms = UserGroupRepoGroupToPerm.query() \
563 574 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
564 575
565 576 for perm in user_perms:
566 577 perm_name = perm.permission.permission_name.replace(
567 578 'group.', 'repository.')
568 579 perm_obj = Permission.get_by_key(perm_name)
569 580 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
570 581
571 582 for perm in group_perms:
572 583 perm_name = perm.permission.permission_name.replace(
573 584 'group.', 'repository.')
574 585 perm_obj = Permission.get_by_key(perm_name)
575 586 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
576 587
577 588 if private:
578 589 RepoModel(self.sa).grant_user_permission(
579 590 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
580 591
581 592 else:
582 593 perm_obj = self._create_default_perms(new_repo, private)
583 594 self.sa.add(perm_obj)
584 595
585 596 # now automatically start following this repository as owner
586 597 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
587 598
588 599 # we need to flush here, in order to check if database won't
589 600 # throw any exceptions, create filesystem dirs at the very end
590 601 self.sa.flush()
591 602 events.trigger(events.RepoCreateEvent(new_repo))
592 603 return new_repo
593 604
594 605 except Exception:
595 606 log.error(traceback.format_exc())
596 607 raise
597 608
598 609 def create(self, form_data, cur_user):
599 610 """
600 611 Create repository using celery tasks
601 612
602 613 :param form_data:
603 614 :param cur_user:
604 615 """
605 616 from rhodecode.lib.celerylib import tasks, run_task
606 617 return run_task(tasks.create_repo, form_data, cur_user)
607 618
608 619 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
609 620 perm_deletions=None, check_perms=True,
610 621 cur_user=None):
611 622 if not perm_additions:
612 623 perm_additions = []
613 624 if not perm_updates:
614 625 perm_updates = []
615 626 if not perm_deletions:
616 627 perm_deletions = []
617 628
618 629 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
619 630
620 631 changes = {
621 632 'added': [],
622 633 'updated': [],
623 634 'deleted': [],
624 635 'default_user_changed': None
625 636 }
626 637
627 638 repo = self._get_repo(repo)
628 639
629 640 # update permissions
630 641 for member_id, perm, member_type in perm_updates:
631 642 member_id = int(member_id)
632 643 if member_type == 'user':
633 644 member_name = User.get(member_id).username
634 645 if member_name == User.DEFAULT_USER:
635 646 # NOTE(dan): detect if we changed permissions for default user
636 647 perm_obj = self.sa.query(UserRepoToPerm) \
637 648 .filter(UserRepoToPerm.user_id == member_id) \
638 649 .filter(UserRepoToPerm.repository == repo) \
639 650 .scalar()
640 651 if perm_obj and perm_obj.permission.permission_name != perm:
641 652 changes['default_user_changed'] = True
642 653
643 654 # this updates also current one if found
644 655 self.grant_user_permission(
645 656 repo=repo, user=member_id, perm=perm)
646 657 elif member_type == 'user_group':
647 658 # check if we have permissions to alter this usergroup
648 659 member_name = UserGroup.get(member_id).users_group_name
649 660 if not check_perms or HasUserGroupPermissionAny(
650 661 *req_perms)(member_name, user=cur_user):
651 662 self.grant_user_group_permission(
652 663 repo=repo, group_name=member_id, perm=perm)
653 664 else:
654 665 raise ValueError("member_type must be 'user' or 'user_group' "
655 666 "got {} instead".format(member_type))
656 667 changes['updated'].append({'type': member_type, 'id': member_id,
657 668 'name': member_name, 'new_perm': perm})
658 669
659 670 # set new permissions
660 671 for member_id, perm, member_type in perm_additions:
661 672 member_id = int(member_id)
662 673 if member_type == 'user':
663 674 member_name = User.get(member_id).username
664 675 self.grant_user_permission(
665 676 repo=repo, user=member_id, perm=perm)
666 677 elif member_type == 'user_group':
667 678 # check if we have permissions to alter this usergroup
668 679 member_name = UserGroup.get(member_id).users_group_name
669 680 if not check_perms or HasUserGroupPermissionAny(
670 681 *req_perms)(member_name, user=cur_user):
671 682 self.grant_user_group_permission(
672 683 repo=repo, group_name=member_id, perm=perm)
673 684 else:
674 685 raise ValueError("member_type must be 'user' or 'user_group' "
675 686 "got {} instead".format(member_type))
676 687
677 688 changes['added'].append({'type': member_type, 'id': member_id,
678 689 'name': member_name, 'new_perm': perm})
679 690 # delete permissions
680 691 for member_id, perm, member_type in perm_deletions:
681 692 member_id = int(member_id)
682 693 if member_type == 'user':
683 694 member_name = User.get(member_id).username
684 695 self.revoke_user_permission(repo=repo, user=member_id)
685 696 elif member_type == 'user_group':
686 697 # check if we have permissions to alter this usergroup
687 698 member_name = UserGroup.get(member_id).users_group_name
688 699 if not check_perms or HasUserGroupPermissionAny(
689 700 *req_perms)(member_name, user=cur_user):
690 701 self.revoke_user_group_permission(
691 702 repo=repo, group_name=member_id)
692 703 else:
693 704 raise ValueError("member_type must be 'user' or 'user_group' "
694 705 "got {} instead".format(member_type))
695 706
696 707 changes['deleted'].append({'type': member_type, 'id': member_id,
697 708 'name': member_name, 'new_perm': perm})
698 709 return changes
699 710
700 711 def create_fork(self, form_data, cur_user):
701 712 """
702 713 Simple wrapper into executing celery task for fork creation
703 714
704 715 :param form_data:
705 716 :param cur_user:
706 717 """
707 718 from rhodecode.lib.celerylib import tasks, run_task
708 719 return run_task(tasks.create_repo_fork, form_data, cur_user)
709 720
710 721 def archive(self, repo):
711 722 """
712 723 Archive given repository. Set archive flag.
713 724
714 725 :param repo:
715 726 """
716 727 repo = self._get_repo(repo)
717 728 if repo:
718 729
719 730 try:
720 731 repo.archived = True
721 732 self.sa.add(repo)
722 733 self.sa.commit()
723 734 except Exception:
724 735 log.error(traceback.format_exc())
725 736 raise
726 737
727 738 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
728 739 """
729 740 Delete given repository, forks parameter defines what do do with
730 741 attached forks. Throws AttachedForksError if deleted repo has attached
731 742 forks
732 743
733 744 :param repo:
734 745 :param forks: str 'delete' or 'detach'
735 746 :param pull_requests: str 'delete' or None
736 747 :param fs_remove: remove(archive) repo from filesystem
737 748 """
738 749 if not cur_user:
739 750 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
740 751 repo = self._get_repo(repo)
741 752 if repo:
742 753 if forks == 'detach':
743 754 for r in repo.forks:
744 755 r.fork = None
745 756 self.sa.add(r)
746 757 elif forks == 'delete':
747 758 for r in repo.forks:
748 759 self.delete(r, forks='delete')
749 760 elif [f for f in repo.forks]:
750 761 raise AttachedForksError()
751 762
752 763 # check for pull requests
753 764 pr_sources = repo.pull_requests_source
754 765 pr_targets = repo.pull_requests_target
755 766 if pull_requests != 'delete' and (pr_sources or pr_targets):
756 767 raise AttachedPullRequestsError()
757 768
758 769 old_repo_dict = repo.get_dict()
759 770 events.trigger(events.RepoPreDeleteEvent(repo))
760 771 try:
761 772 self.sa.delete(repo)
762 773 if fs_remove:
763 774 self._delete_filesystem_repo(repo)
764 775 else:
765 776 log.debug('skipping removal from filesystem')
766 777 old_repo_dict.update({
767 778 'deleted_by': cur_user,
768 779 'deleted_on': time.time(),
769 780 })
770 781 hooks_base.delete_repository(**old_repo_dict)
771 782 events.trigger(events.RepoDeleteEvent(repo))
772 783 except Exception:
773 784 log.error(traceback.format_exc())
774 785 raise
775 786
776 787 def grant_user_permission(self, repo, user, perm):
777 788 """
778 789 Grant permission for user on given repository, or update existing one
779 790 if found
780 791
781 792 :param repo: Instance of Repository, repository_id, or repository name
782 793 :param user: Instance of User, user_id or username
783 794 :param perm: Instance of Permission, or permission_name
784 795 """
785 796 user = self._get_user(user)
786 797 repo = self._get_repo(repo)
787 798 permission = self._get_perm(perm)
788 799
789 800 # check if we have that permission already
790 801 obj = self.sa.query(UserRepoToPerm) \
791 802 .filter(UserRepoToPerm.user == user) \
792 803 .filter(UserRepoToPerm.repository == repo) \
793 804 .scalar()
794 805 if obj is None:
795 806 # create new !
796 807 obj = UserRepoToPerm()
797 808 obj.repository = repo
798 809 obj.user = user
799 810 obj.permission = permission
800 811 self.sa.add(obj)
801 812 log.debug('Granted perm %s to %s on %s', perm, user, repo)
802 813 action_logger_generic(
803 814 'granted permission: {} to user: {} on repo: {}'.format(
804 815 perm, user, repo), namespace='security.repo')
805 816 return obj
806 817
807 818 def revoke_user_permission(self, repo, user):
808 819 """
809 820 Revoke permission for user on given repository
810 821
811 822 :param repo: Instance of Repository, repository_id, or repository name
812 823 :param user: Instance of User, user_id or username
813 824 """
814 825
815 826 user = self._get_user(user)
816 827 repo = self._get_repo(repo)
817 828
818 829 obj = self.sa.query(UserRepoToPerm) \
819 830 .filter(UserRepoToPerm.repository == repo) \
820 831 .filter(UserRepoToPerm.user == user) \
821 832 .scalar()
822 833 if obj:
823 834 self.sa.delete(obj)
824 835 log.debug('Revoked perm on %s on %s', repo, user)
825 836 action_logger_generic(
826 837 'revoked permission from user: {} on repo: {}'.format(
827 838 user, repo), namespace='security.repo')
828 839
829 840 def grant_user_group_permission(self, repo, group_name, perm):
830 841 """
831 842 Grant permission for user group on given repository, or update
832 843 existing one if found
833 844
834 845 :param repo: Instance of Repository, repository_id, or repository name
835 846 :param group_name: Instance of UserGroup, users_group_id,
836 847 or user group name
837 848 :param perm: Instance of Permission, or permission_name
838 849 """
839 850 repo = self._get_repo(repo)
840 851 group_name = self._get_user_group(group_name)
841 852 permission = self._get_perm(perm)
842 853
843 854 # check if we have that permission already
844 855 obj = self.sa.query(UserGroupRepoToPerm) \
845 856 .filter(UserGroupRepoToPerm.users_group == group_name) \
846 857 .filter(UserGroupRepoToPerm.repository == repo) \
847 858 .scalar()
848 859
849 860 if obj is None:
850 861 # create new
851 862 obj = UserGroupRepoToPerm()
852 863
853 864 obj.repository = repo
854 865 obj.users_group = group_name
855 866 obj.permission = permission
856 867 self.sa.add(obj)
857 868 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
858 869 action_logger_generic(
859 870 'granted permission: {} to usergroup: {} on repo: {}'.format(
860 871 perm, group_name, repo), namespace='security.repo')
861 872
862 873 return obj
863 874
864 875 def revoke_user_group_permission(self, repo, group_name):
865 876 """
866 877 Revoke permission for user group on given repository
867 878
868 879 :param repo: Instance of Repository, repository_id, or repository name
869 880 :param group_name: Instance of UserGroup, users_group_id,
870 881 or user group name
871 882 """
872 883 repo = self._get_repo(repo)
873 884 group_name = self._get_user_group(group_name)
874 885
875 886 obj = self.sa.query(UserGroupRepoToPerm) \
876 887 .filter(UserGroupRepoToPerm.repository == repo) \
877 888 .filter(UserGroupRepoToPerm.users_group == group_name) \
878 889 .scalar()
879 890 if obj:
880 891 self.sa.delete(obj)
881 892 log.debug('Revoked perm to %s on %s', repo, group_name)
882 893 action_logger_generic(
883 894 'revoked permission from usergroup: {} on repo: {}'.format(
884 895 group_name, repo), namespace='security.repo')
885 896
886 897 def delete_stats(self, repo_name):
887 898 """
888 899 removes stats for given repo
889 900
890 901 :param repo_name:
891 902 """
892 903 repo = self._get_repo(repo_name)
893 904 try:
894 905 obj = self.sa.query(Statistics) \
895 906 .filter(Statistics.repository == repo).scalar()
896 907 if obj:
897 908 self.sa.delete(obj)
898 909 except Exception:
899 910 log.error(traceback.format_exc())
900 911 raise
901 912
902 913 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
903 914 field_type='str', field_desc=''):
904 915
905 916 repo = self._get_repo(repo_name)
906 917
907 918 new_field = RepositoryField()
908 919 new_field.repository = repo
909 920 new_field.field_key = field_key
910 921 new_field.field_type = field_type # python type
911 922 new_field.field_value = field_value
912 923 new_field.field_desc = field_desc
913 924 new_field.field_label = field_label
914 925 self.sa.add(new_field)
915 926 return new_field
916 927
917 928 def delete_repo_field(self, repo_name, field_key):
918 929 repo = self._get_repo(repo_name)
919 930 field = RepositoryField.get_by_key_name(field_key, repo)
920 931 if field:
921 932 self.sa.delete(field)
922 933
923 934 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
924 935 clone_uri=None, repo_store_location=None,
925 936 use_global_config=False, install_hooks=True):
926 937 """
927 938 makes repository on filesystem. It's group aware means it'll create
928 939 a repository within a group, and alter the paths accordingly of
929 940 group location
930 941
931 942 :param repo_name:
932 943 :param alias:
933 944 :param parent:
934 945 :param clone_uri:
935 946 :param repo_store_location:
936 947 """
937 948 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
938 949 from rhodecode.model.scm import ScmModel
939 950
940 951 if Repository.NAME_SEP in repo_name:
941 952 raise ValueError(
942 953 'repo_name must not contain groups got `%s`' % repo_name)
943 954
944 955 if isinstance(repo_group, RepoGroup):
945 956 new_parent_path = os.sep.join(repo_group.full_path_splitted)
946 957 else:
947 958 new_parent_path = repo_group or ''
948 959
949 960 if repo_store_location:
950 961 _paths = [repo_store_location]
951 962 else:
952 963 _paths = [self.repos_path, new_parent_path, repo_name]
953 964 # we need to make it str for mercurial
954 965 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
955 966
956 967 # check if this path is not a repository
957 968 if is_valid_repo(repo_path, self.repos_path):
958 969 raise Exception('This path %s is a valid repository' % repo_path)
959 970
960 971 # check if this path is a group
961 972 if is_valid_repo_group(repo_path, self.repos_path):
962 973 raise Exception('This path %s is a valid group' % repo_path)
963 974
964 975 log.info('creating repo %s in %s from url: `%s`',
965 976 repo_name, safe_unicode(repo_path),
966 977 obfuscate_url_pw(clone_uri))
967 978
968 979 backend = get_backend(repo_type)
969 980
970 981 config_repo = None if use_global_config else repo_name
971 982 if config_repo and new_parent_path:
972 983 config_repo = Repository.NAME_SEP.join(
973 984 (new_parent_path, config_repo))
974 985 config = make_db_config(clear_session=False, repo=config_repo)
975 986 config.set('extensions', 'largefiles', '')
976 987
977 988 # patch and reset hooks section of UI config to not run any
978 989 # hooks on creating remote repo
979 990 config.clear_section('hooks')
980 991
981 992 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
982 993 if repo_type == 'git':
983 994 repo = backend(
984 995 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
985 996 with_wire={"cache": False})
986 997 else:
987 998 repo = backend(
988 999 repo_path, config=config, create=True, src_url=clone_uri,
989 1000 with_wire={"cache": False})
990 1001
991 1002 if install_hooks:
992 1003 repo.install_hooks()
993 1004
994 1005 log.debug('Created repo %s with %s backend',
995 1006 safe_unicode(repo_name), safe_unicode(repo_type))
996 1007 return repo
997 1008
998 1009 def _rename_filesystem_repo(self, old, new):
999 1010 """
1000 1011 renames repository on filesystem
1001 1012
1002 1013 :param old: old name
1003 1014 :param new: new name
1004 1015 """
1005 1016 log.info('renaming repo from %s to %s', old, new)
1006 1017
1007 1018 old_path = os.path.join(self.repos_path, old)
1008 1019 new_path = os.path.join(self.repos_path, new)
1009 1020 if os.path.isdir(new_path):
1010 1021 raise Exception(
1011 1022 'Was trying to rename to already existing dir %s' % new_path
1012 1023 )
1013 1024 shutil.move(old_path, new_path)
1014 1025
1015 1026 def _delete_filesystem_repo(self, repo):
1016 1027 """
1017 1028 removes repo from filesystem, the removal is acctually made by
1018 1029 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
1019 1030 repository is no longer valid for rhodecode, can be undeleted later on
1020 1031 by reverting the renames on this repository
1021 1032
1022 1033 :param repo: repo object
1023 1034 """
1024 1035 rm_path = os.path.join(self.repos_path, repo.repo_name)
1025 1036 repo_group = repo.group
1026 1037 log.info("Removing repository %s", rm_path)
1027 1038 # disable hg/git internal that it doesn't get detected as repo
1028 1039 alias = repo.repo_type
1029 1040
1030 1041 config = make_db_config(clear_session=False)
1031 1042 config.set('extensions', 'largefiles', '')
1032 1043 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1033 1044
1034 1045 # skip this for bare git repos
1035 1046 if not bare:
1036 1047 # disable VCS repo
1037 1048 vcs_path = os.path.join(rm_path, '.%s' % alias)
1038 1049 if os.path.exists(vcs_path):
1039 1050 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1040 1051
1041 1052 _now = datetime.datetime.now()
1042 1053 _ms = str(_now.microsecond).rjust(6, '0')
1043 1054 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1044 1055 repo.just_name)
1045 1056 if repo_group:
1046 1057 # if repository is in group, prefix the removal path with the group
1047 1058 args = repo_group.full_path_splitted + [_d]
1048 1059 _d = os.path.join(*args)
1049 1060
1050 1061 if os.path.isdir(rm_path):
1051 1062 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1052 1063
1053 1064 # finally cleanup diff-cache if it exists
1054 1065 cached_diffs_dir = repo.cached_diffs_dir
1055 1066 if os.path.isdir(cached_diffs_dir):
1056 1067 shutil.rmtree(cached_diffs_dir)
1057 1068
1058 1069
1059 1070 class ReadmeFinder:
1060 1071 """
1061 1072 Utility which knows how to find a readme for a specific commit.
1062 1073
1063 1074 The main idea is that this is a configurable algorithm. When creating an
1064 1075 instance you can define parameters, currently only the `default_renderer`.
1065 1076 Based on this configuration the method :meth:`search` behaves slightly
1066 1077 different.
1067 1078 """
1068 1079
1069 1080 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1070 1081 path_re = re.compile(r'^docs?', re.IGNORECASE)
1071 1082
1072 1083 default_priorities = {
1073 1084 None: 0,
1074 1085 '.text': 2,
1075 1086 '.txt': 3,
1076 1087 '.rst': 1,
1077 1088 '.rest': 2,
1078 1089 '.md': 1,
1079 1090 '.mkdn': 2,
1080 1091 '.mdown': 3,
1081 1092 '.markdown': 4,
1082 1093 }
1083 1094
1084 1095 path_priority = {
1085 1096 'doc': 0,
1086 1097 'docs': 1,
1087 1098 }
1088 1099
1089 1100 FALLBACK_PRIORITY = 99
1090 1101
1091 1102 RENDERER_TO_EXTENSION = {
1092 1103 'rst': ['.rst', '.rest'],
1093 1104 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
1094 1105 }
1095 1106
1096 1107 def __init__(self, default_renderer=None):
1097 1108 self._default_renderer = default_renderer
1098 1109 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1099 1110 default_renderer, [])
1100 1111
1101 1112 def search(self, commit, path=u'/'):
1102 1113 """
1103 1114 Find a readme in the given `commit`.
1104 1115 """
1105 1116 nodes = commit.get_nodes(path)
1106 1117 matches = self._match_readmes(nodes)
1107 1118 matches = self._sort_according_to_priority(matches)
1108 1119 if matches:
1109 1120 return matches[0].node
1110 1121
1111 1122 paths = self._match_paths(nodes)
1112 1123 paths = self._sort_paths_according_to_priority(paths)
1113 1124 for path in paths:
1114 1125 match = self.search(commit, path=path)
1115 1126 if match:
1116 1127 return match
1117 1128
1118 1129 return None
1119 1130
1120 1131 def _match_readmes(self, nodes):
1121 1132 for node in nodes:
1122 1133 if not node.is_file():
1123 1134 continue
1124 1135 path = node.path.rsplit('/', 1)[-1]
1125 1136 match = self.readme_re.match(path)
1126 1137 if match:
1127 1138 extension = match.group(1)
1128 1139 yield ReadmeMatch(node, match, self._priority(extension))
1129 1140
1130 1141 def _match_paths(self, nodes):
1131 1142 for node in nodes:
1132 1143 if not node.is_dir():
1133 1144 continue
1134 1145 match = self.path_re.match(node.path)
1135 1146 if match:
1136 1147 yield node.path
1137 1148
1138 1149 def _priority(self, extension):
1139 1150 renderer_priority = (
1140 1151 0 if extension in self._renderer_extensions else 1)
1141 1152 extension_priority = self.default_priorities.get(
1142 1153 extension, self.FALLBACK_PRIORITY)
1143 1154 return (renderer_priority, extension_priority)
1144 1155
1145 1156 def _sort_according_to_priority(self, matches):
1146 1157
1147 1158 def priority_and_path(match):
1148 1159 return (match.priority, match.path)
1149 1160
1150 1161 return sorted(matches, key=priority_and_path)
1151 1162
1152 1163 def _sort_paths_according_to_priority(self, paths):
1153 1164
1154 1165 def priority_and_path(path):
1155 1166 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1156 1167
1157 1168 return sorted(paths, key=priority_and_path)
1158 1169
1159 1170
1160 1171 class ReadmeMatch:
1161 1172
1162 1173 def __init__(self, node, match, priority):
1163 1174 self.node = node
1164 1175 self._match = match
1165 1176 self.priority = priority
1166 1177
1167 1178 @property
1168 1179 def path(self):
1169 1180 return self.node.path
1170 1181
1171 1182 def __repr__(self):
1172 1183 return '<ReadmeMatch {} priority={}'.format(self.path, self.priority)
@@ -1,887 +1,897 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 repo group model for RhodeCode
24 24 """
25 25
26 26 import os
27 27 import datetime
28 28 import itertools
29 29 import logging
30 30 import shutil
31 31 import time
32 32 import traceback
33 33 import string
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode import events
38 38 from rhodecode.model import BaseModel
39 39 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
40 40 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
41 41 UserGroup, Repository)
42 from rhodecode.model.permission import PermissionModel
42 43 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
43 44 from rhodecode.lib.caching_query import FromCache
44 45 from rhodecode.lib.utils2 import action_logger_generic
45 46
46 47 log = logging.getLogger(__name__)
47 48
48 49
49 50 class RepoGroupModel(BaseModel):
50 51
51 52 cls = RepoGroup
52 53 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
53 54 PERSONAL_GROUP_PATTERN = '${username}' # default
54 55
55 56 def _get_user_group(self, users_group):
56 57 return self._get_instance(UserGroup, users_group,
57 58 callback=UserGroup.get_by_group_name)
58 59
59 60 def _get_repo_group(self, repo_group):
60 61 return self._get_instance(RepoGroup, repo_group,
61 62 callback=RepoGroup.get_by_group_name)
62 63
63 64 def get_repo_group(self, repo_group):
64 65 return self._get_repo_group(repo_group)
65 66
66 67 @LazyProperty
67 68 def repos_path(self):
68 69 """
69 70 Gets the repositories root path from database
70 71 """
71 72
72 73 settings_model = VcsSettingsModel(sa=self.sa)
73 74 return settings_model.get_repos_location()
74 75
75 76 def get_by_group_name(self, repo_group_name, cache=None):
76 77 repo = self.sa.query(RepoGroup) \
77 78 .filter(RepoGroup.group_name == repo_group_name)
78 79
79 80 if cache:
80 81 name_key = _hash_key(repo_group_name)
81 82 repo = repo.options(
82 83 FromCache("sql_cache_short", "get_repo_group_%s" % name_key))
83 84 return repo.scalar()
84 85
85 86 def get_default_create_personal_repo_group(self):
86 87 value = SettingsModel().get_setting_by_name(
87 88 'create_personal_repo_group')
88 89 return value.app_settings_value if value else None or False
89 90
90 91 def get_personal_group_name_pattern(self):
91 92 value = SettingsModel().get_setting_by_name(
92 93 'personal_repo_group_pattern')
93 94 val = value.app_settings_value if value else None
94 95 group_template = val or self.PERSONAL_GROUP_PATTERN
95 96
96 97 group_template = group_template.lstrip('/')
97 98 return group_template
98 99
99 100 def get_personal_group_name(self, user):
100 101 template = self.get_personal_group_name_pattern()
101 102 return string.Template(template).safe_substitute(
102 103 username=user.username,
103 104 user_id=user.user_id,
104 105 first_name=user.first_name,
105 106 last_name=user.last_name,
106 107 )
107 108
108 109 def create_personal_repo_group(self, user, commit_early=True):
109 110 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
110 111 personal_repo_group_name = self.get_personal_group_name(user)
111 112
112 113 # create a new one
113 114 RepoGroupModel().create(
114 115 group_name=personal_repo_group_name,
115 116 group_description=desc,
116 117 owner=user.username,
117 118 personal=True,
118 119 commit_early=commit_early)
119 120
120 121 def _create_default_perms(self, new_group):
121 122 # create default permission
122 123 default_perm = 'group.read'
123 124 def_user = User.get_default_user()
124 125 for p in def_user.user_perms:
125 126 if p.permission.permission_name.startswith('group.'):
126 127 default_perm = p.permission.permission_name
127 128 break
128 129
129 130 repo_group_to_perm = UserRepoGroupToPerm()
130 131 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
131 132
132 133 repo_group_to_perm.group = new_group
133 134 repo_group_to_perm.user_id = def_user.user_id
134 135 return repo_group_to_perm
135 136
136 137 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
137 138 get_object=False):
138 139 """
139 140 Get's the group name and a parent group name from given group name.
140 141 If repo_in_path is set to truth, we asume the full path also includes
141 142 repo name, in such case we clean the last element.
142 143
143 144 :param group_name_full:
144 145 """
145 146 split_paths = 1
146 147 if repo_in_path:
147 148 split_paths = 2
148 149 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
149 150
150 151 if repo_in_path and len(_parts) > 1:
151 152 # such case last element is the repo_name
152 153 _parts.pop(-1)
153 154 group_name_cleaned = _parts[-1] # just the group name
154 155 parent_repo_group_name = None
155 156
156 157 if len(_parts) > 1:
157 158 parent_repo_group_name = _parts[0]
158 159
159 160 parent_group = None
160 161 if parent_repo_group_name:
161 162 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
162 163
163 164 if get_object:
164 165 return group_name_cleaned, parent_repo_group_name, parent_group
165 166
166 167 return group_name_cleaned, parent_repo_group_name
167 168
168 169 def check_exist_filesystem(self, group_name, exc_on_failure=True):
169 170 create_path = os.path.join(self.repos_path, group_name)
170 171 log.debug('creating new group in %s', create_path)
171 172
172 173 if os.path.isdir(create_path):
173 174 if exc_on_failure:
174 175 abs_create_path = os.path.abspath(create_path)
175 176 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
176 177 return False
177 178 return True
178 179
179 180 def _create_group(self, group_name):
180 181 """
181 182 makes repository group on filesystem
182 183
183 184 :param repo_name:
184 185 :param parent_id:
185 186 """
186 187
187 188 self.check_exist_filesystem(group_name)
188 189 create_path = os.path.join(self.repos_path, group_name)
189 190 log.debug('creating new group in %s', create_path)
190 191 os.makedirs(create_path, mode=0o755)
191 192 log.debug('created group in %s', create_path)
192 193
193 194 def _rename_group(self, old, new):
194 195 """
195 196 Renames a group on filesystem
196 197
197 198 :param group_name:
198 199 """
199 200
200 201 if old == new:
201 202 log.debug('skipping group rename')
202 203 return
203 204
204 205 log.debug('renaming repository group from %s to %s', old, new)
205 206
206 207 old_path = os.path.join(self.repos_path, old)
207 208 new_path = os.path.join(self.repos_path, new)
208 209
209 210 log.debug('renaming repos paths from %s to %s', old_path, new_path)
210 211
211 212 if os.path.isdir(new_path):
212 213 raise Exception('Was trying to rename to already '
213 214 'existing dir %s' % new_path)
214 215 shutil.move(old_path, new_path)
215 216
216 217 def _delete_filesystem_group(self, group, force_delete=False):
217 218 """
218 219 Deletes a group from a filesystem
219 220
220 221 :param group: instance of group from database
221 222 :param force_delete: use shutil rmtree to remove all objects
222 223 """
223 224 paths = group.full_path.split(RepoGroup.url_sep())
224 225 paths = os.sep.join(paths)
225 226
226 227 rm_path = os.path.join(self.repos_path, paths)
227 228 log.info("Removing group %s", rm_path)
228 229 # delete only if that path really exists
229 230 if os.path.isdir(rm_path):
230 231 if force_delete:
231 232 shutil.rmtree(rm_path)
232 233 else:
233 234 # archive that group`
234 235 _now = datetime.datetime.now()
235 236 _ms = str(_now.microsecond).rjust(6, '0')
236 237 _d = 'rm__%s_GROUP_%s' % (
237 238 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
238 239 shutil.move(rm_path, os.path.join(self.repos_path, _d))
239 240
240 241 def create(self, group_name, group_description, owner, just_db=False,
241 242 copy_permissions=False, personal=None, commit_early=True):
242 243
243 244 (group_name_cleaned,
244 245 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
245 246
246 247 parent_group = None
247 248 if parent_group_name:
248 249 parent_group = self._get_repo_group(parent_group_name)
249 250 if not parent_group:
250 251 # we tried to create a nested group, but the parent is not
251 252 # existing
252 253 raise ValueError(
253 254 'Parent group `%s` given in `%s` group name '
254 255 'is not yet existing.' % (parent_group_name, group_name))
255 256
256 257 # because we are doing a cleanup, we need to check if such directory
257 258 # already exists. If we don't do that we can accidentally delete
258 259 # existing directory via cleanup that can cause data issues, since
259 260 # delete does a folder rename to special syntax later cleanup
260 261 # functions can delete this
261 262 cleanup_group = self.check_exist_filesystem(group_name,
262 263 exc_on_failure=False)
263 264 user = self._get_user(owner)
264 265 if not user:
265 266 raise ValueError('Owner %s not found as rhodecode user', owner)
266 267
267 268 try:
268 269 new_repo_group = RepoGroup()
269 270 new_repo_group.user = user
270 271 new_repo_group.group_description = group_description or group_name
271 272 new_repo_group.parent_group = parent_group
272 273 new_repo_group.group_name = group_name
273 274 new_repo_group.personal = personal
274 275
275 276 self.sa.add(new_repo_group)
276 277
277 278 # create an ADMIN permission for owner except if we're super admin,
278 279 # later owner should go into the owner field of groups
279 280 if not user.is_admin:
280 281 self.grant_user_permission(repo_group=new_repo_group,
281 282 user=owner, perm='group.admin')
282 283
283 284 if parent_group and copy_permissions:
284 285 # copy permissions from parent
285 286 user_perms = UserRepoGroupToPerm.query() \
286 287 .filter(UserRepoGroupToPerm.group == parent_group).all()
287 288
288 289 group_perms = UserGroupRepoGroupToPerm.query() \
289 290 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
290 291
291 292 for perm in user_perms:
292 293 # don't copy over the permission for user who is creating
293 294 # this group, if he is not super admin he get's admin
294 295 # permission set above
295 296 if perm.user != user or user.is_admin:
296 297 UserRepoGroupToPerm.create(
297 298 perm.user, new_repo_group, perm.permission)
298 299
299 300 for perm in group_perms:
300 301 UserGroupRepoGroupToPerm.create(
301 302 perm.users_group, new_repo_group, perm.permission)
302 303 else:
303 304 perm_obj = self._create_default_perms(new_repo_group)
304 305 self.sa.add(perm_obj)
305 306
306 307 # now commit the changes, earlier so we are sure everything is in
307 308 # the database.
308 309 if commit_early:
309 310 self.sa.commit()
310 311 if not just_db:
311 312 self._create_group(new_repo_group.group_name)
312 313
313 314 # trigger the post hook
314 315 from rhodecode.lib import hooks_base
315 316 repo_group = RepoGroup.get_by_group_name(group_name)
316 317
317 318 # update repo group commit caches initially
318 319 repo_group.update_commit_cache()
319 320
320 321 hooks_base.create_repository_group(
321 322 created_by=user.username, **repo_group.get_dict())
322 323
323 324 # Trigger create event.
324 325 events.trigger(events.RepoGroupCreateEvent(repo_group))
325 326
326 327 return new_repo_group
327 328 except Exception:
328 329 self.sa.rollback()
329 330 log.exception('Exception occurred when creating repository group, '
330 331 'doing cleanup...')
331 332 # rollback things manually !
332 333 repo_group = RepoGroup.get_by_group_name(group_name)
333 334 if repo_group:
334 335 RepoGroup.delete(repo_group.group_id)
335 336 self.sa.commit()
336 337 if cleanup_group:
337 338 RepoGroupModel()._delete_filesystem_group(repo_group)
338 339 raise
339 340
340 341 def update_permissions(
341 342 self, repo_group, perm_additions=None, perm_updates=None,
342 343 perm_deletions=None, recursive=None, check_perms=True,
343 344 cur_user=None):
344 345 from rhodecode.model.repo import RepoModel
345 346 from rhodecode.lib.auth import HasUserGroupPermissionAny
346 347
347 348 if not perm_additions:
348 349 perm_additions = []
349 350 if not perm_updates:
350 351 perm_updates = []
351 352 if not perm_deletions:
352 353 perm_deletions = []
353 354
354 355 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
355 356
356 357 changes = {
357 358 'added': [],
358 359 'updated': [],
359 360 'deleted': [],
360 361 'default_user_changed': None
361 362 }
362 363
363 364 def _set_perm_user(obj, user, perm):
364 365 if isinstance(obj, RepoGroup):
365 366 self.grant_user_permission(
366 367 repo_group=obj, user=user, perm=perm)
367 368 elif isinstance(obj, Repository):
368 369 # private repos will not allow to change the default
369 370 # permissions using recursive mode
370 371 if obj.private and user == User.DEFAULT_USER:
371 372 return
372 373
373 374 # we set group permission but we have to switch to repo
374 375 # permission
375 376 perm = perm.replace('group.', 'repository.')
376 377 RepoModel().grant_user_permission(
377 378 repo=obj, user=user, perm=perm)
378 379
379 380 def _set_perm_group(obj, users_group, perm):
380 381 if isinstance(obj, RepoGroup):
381 382 self.grant_user_group_permission(
382 383 repo_group=obj, group_name=users_group, perm=perm)
383 384 elif isinstance(obj, Repository):
384 385 # we set group permission but we have to switch to repo
385 386 # permission
386 387 perm = perm.replace('group.', 'repository.')
387 388 RepoModel().grant_user_group_permission(
388 389 repo=obj, group_name=users_group, perm=perm)
389 390
390 391 def _revoke_perm_user(obj, user):
391 392 if isinstance(obj, RepoGroup):
392 393 self.revoke_user_permission(repo_group=obj, user=user)
393 394 elif isinstance(obj, Repository):
394 395 RepoModel().revoke_user_permission(repo=obj, user=user)
395 396
396 397 def _revoke_perm_group(obj, user_group):
397 398 if isinstance(obj, RepoGroup):
398 399 self.revoke_user_group_permission(
399 400 repo_group=obj, group_name=user_group)
400 401 elif isinstance(obj, Repository):
401 402 RepoModel().revoke_user_group_permission(
402 403 repo=obj, group_name=user_group)
403 404
404 405 # start updates
405 406 log.debug('Now updating permissions for %s in recursive mode:%s',
406 407 repo_group, recursive)
407 408
408 409 # initialize check function, we'll call that multiple times
409 410 has_group_perm = HasUserGroupPermissionAny(*req_perms)
410 411
411 412 for obj in repo_group.recursive_groups_and_repos():
412 413 # iterated obj is an instance of a repos group or repository in
413 414 # that group, recursive option can be: none, repos, groups, all
414 415 if recursive == 'all':
415 416 obj = obj
416 417 elif recursive == 'repos':
417 418 # skip groups, other than this one
418 419 if isinstance(obj, RepoGroup) and not obj == repo_group:
419 420 continue
420 421 elif recursive == 'groups':
421 422 # skip repos
422 423 if isinstance(obj, Repository):
423 424 continue
424 425 else: # recursive == 'none':
425 426 # DEFAULT option - don't apply to iterated objects
426 427 # also we do a break at the end of this loop. if we are not
427 428 # in recursive mode
428 429 obj = repo_group
429 430
430 431 change_obj = obj.get_api_data()
431 432
432 433 # update permissions
433 434 for member_id, perm, member_type in perm_updates:
434 435 member_id = int(member_id)
435 436 if member_type == 'user':
436 437 member_name = User.get(member_id).username
437 438 if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
438 439 # NOTE(dan): detect if we changed permissions for default user
439 440 perm_obj = self.sa.query(UserRepoGroupToPerm) \
440 441 .filter(UserRepoGroupToPerm.user_id == member_id) \
441 442 .filter(UserRepoGroupToPerm.group == repo_group) \
442 443 .scalar()
443 444 if perm_obj and perm_obj.permission.permission_name != perm:
444 445 changes['default_user_changed'] = True
445 446
446 447 # this updates also current one if found
447 448 _set_perm_user(obj, user=member_id, perm=perm)
448 449 elif member_type == 'user_group':
449 450 member_name = UserGroup.get(member_id).users_group_name
450 451 if not check_perms or has_group_perm(member_name,
451 452 user=cur_user):
452 453 _set_perm_group(obj, users_group=member_id, perm=perm)
453 454 else:
454 455 raise ValueError("member_type must be 'user' or 'user_group' "
455 456 "got {} instead".format(member_type))
456 457
457 458 changes['updated'].append(
458 459 {'change_obj': change_obj, 'type': member_type,
459 460 'id': member_id, 'name': member_name, 'new_perm': perm})
460 461
461 462 # set new permissions
462 463 for member_id, perm, member_type in perm_additions:
463 464 member_id = int(member_id)
464 465 if member_type == 'user':
465 466 member_name = User.get(member_id).username
466 467 _set_perm_user(obj, user=member_id, perm=perm)
467 468 elif member_type == 'user_group':
468 469 # check if we have permissions to alter this usergroup
469 470 member_name = UserGroup.get(member_id).users_group_name
470 471 if not check_perms or has_group_perm(member_name,
471 472 user=cur_user):
472 473 _set_perm_group(obj, users_group=member_id, perm=perm)
473 474 else:
474 475 raise ValueError("member_type must be 'user' or 'user_group' "
475 476 "got {} instead".format(member_type))
476 477
477 478 changes['added'].append(
478 479 {'change_obj': change_obj, 'type': member_type,
479 480 'id': member_id, 'name': member_name, 'new_perm': perm})
480 481
481 482 # delete permissions
482 483 for member_id, perm, member_type in perm_deletions:
483 484 member_id = int(member_id)
484 485 if member_type == 'user':
485 486 member_name = User.get(member_id).username
486 487 _revoke_perm_user(obj, user=member_id)
487 488 elif member_type == 'user_group':
488 489 # check if we have permissions to alter this usergroup
489 490 member_name = UserGroup.get(member_id).users_group_name
490 491 if not check_perms or has_group_perm(member_name,
491 492 user=cur_user):
492 493 _revoke_perm_group(obj, user_group=member_id)
493 494 else:
494 495 raise ValueError("member_type must be 'user' or 'user_group' "
495 496 "got {} instead".format(member_type))
496 497
497 498 changes['deleted'].append(
498 499 {'change_obj': change_obj, 'type': member_type,
499 500 'id': member_id, 'name': member_name, 'new_perm': perm})
500 501
501 502 # if it's not recursive call for all,repos,groups
502 503 # break the loop and don't proceed with other changes
503 504 if recursive not in ['all', 'repos', 'groups']:
504 505 break
505 506
506 507 return changes
507 508
508 509 def update(self, repo_group, form_data):
509 510 try:
510 511 repo_group = self._get_repo_group(repo_group)
511 512 old_path = repo_group.full_path
512 513
513 514 # change properties
514 515 if 'group_description' in form_data:
515 516 repo_group.group_description = form_data['group_description']
516 517
517 518 if 'enable_locking' in form_data:
518 519 repo_group.enable_locking = form_data['enable_locking']
519 520
520 521 if 'group_parent_id' in form_data:
521 522 parent_group = (
522 523 self._get_repo_group(form_data['group_parent_id']))
523 524 repo_group.group_parent_id = (
524 525 parent_group.group_id if parent_group else None)
525 526 repo_group.parent_group = parent_group
526 527
527 528 # mikhail: to update the full_path, we have to explicitly
528 529 # update group_name
529 530 group_name = form_data.get('group_name', repo_group.name)
530 531 repo_group.group_name = repo_group.get_new_name(group_name)
531 532
532 533 new_path = repo_group.full_path
533 534
535 affected_user_ids = []
534 536 if 'user' in form_data:
535 repo_group.user = User.get_by_username(form_data['user'])
537 old_owner_id = repo_group.user.user_id
538 new_owner = User.get_by_username(form_data['user'])
539 repo_group.user = new_owner
540
541 if old_owner_id != new_owner.user_id:
542 affected_user_ids = [new_owner.user_id, old_owner_id]
536 543
537 544 self.sa.add(repo_group)
538 545
539 546 # iterate over all members of this groups and do fixes
540 547 # set locking if given
541 548 # if obj is a repoGroup also fix the name of the group according
542 549 # to the parent
543 550 # if obj is a Repo fix it's name
544 551 # this can be potentially heavy operation
545 552 for obj in repo_group.recursive_groups_and_repos():
546 553 # set the value from it's parent
547 554 obj.enable_locking = repo_group.enable_locking
548 555 if isinstance(obj, RepoGroup):
549 556 new_name = obj.get_new_name(obj.name)
550 557 log.debug('Fixing group %s to new name %s',
551 558 obj.group_name, new_name)
552 559 obj.group_name = new_name
553 560
554 561 elif isinstance(obj, Repository):
555 562 # we need to get all repositories from this new group and
556 563 # rename them accordingly to new group path
557 564 new_name = obj.get_new_name(obj.just_name)
558 565 log.debug('Fixing repo %s to new name %s',
559 566 obj.repo_name, new_name)
560 567 obj.repo_name = new_name
561 568
562 569 self.sa.add(obj)
563 570
564 571 self._rename_group(old_path, new_path)
565 572
566 573 # Trigger update event.
567 574 events.trigger(events.RepoGroupUpdateEvent(repo_group))
568 575
576 if affected_user_ids:
577 PermissionModel().trigger_permission_flush(affected_user_ids)
578
569 579 return repo_group
570 580 except Exception:
571 581 log.error(traceback.format_exc())
572 582 raise
573 583
574 584 def delete(self, repo_group, force_delete=False, fs_remove=True):
575 585 repo_group = self._get_repo_group(repo_group)
576 586 if not repo_group:
577 587 return False
578 588 try:
579 589 self.sa.delete(repo_group)
580 590 if fs_remove:
581 591 self._delete_filesystem_group(repo_group, force_delete)
582 592 else:
583 593 log.debug('skipping removal from filesystem')
584 594
585 595 # Trigger delete event.
586 596 events.trigger(events.RepoGroupDeleteEvent(repo_group))
587 597 return True
588 598
589 599 except Exception:
590 600 log.error('Error removing repo_group %s', repo_group)
591 601 raise
592 602
593 603 def grant_user_permission(self, repo_group, user, perm):
594 604 """
595 605 Grant permission for user on given repository group, or update
596 606 existing one if found
597 607
598 608 :param repo_group: Instance of RepoGroup, repositories_group_id,
599 609 or repositories_group name
600 610 :param user: Instance of User, user_id or username
601 611 :param perm: Instance of Permission, or permission_name
602 612 """
603 613
604 614 repo_group = self._get_repo_group(repo_group)
605 615 user = self._get_user(user)
606 616 permission = self._get_perm(perm)
607 617
608 618 # check if we have that permission already
609 619 obj = self.sa.query(UserRepoGroupToPerm)\
610 620 .filter(UserRepoGroupToPerm.user == user)\
611 621 .filter(UserRepoGroupToPerm.group == repo_group)\
612 622 .scalar()
613 623 if obj is None:
614 624 # create new !
615 625 obj = UserRepoGroupToPerm()
616 626 obj.group = repo_group
617 627 obj.user = user
618 628 obj.permission = permission
619 629 self.sa.add(obj)
620 630 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
621 631 action_logger_generic(
622 632 'granted permission: {} to user: {} on repogroup: {}'.format(
623 633 perm, user, repo_group), namespace='security.repogroup')
624 634 return obj
625 635
626 636 def revoke_user_permission(self, repo_group, user):
627 637 """
628 638 Revoke permission for user on given repository group
629 639
630 640 :param repo_group: Instance of RepoGroup, repositories_group_id,
631 641 or repositories_group name
632 642 :param user: Instance of User, user_id or username
633 643 """
634 644
635 645 repo_group = self._get_repo_group(repo_group)
636 646 user = self._get_user(user)
637 647
638 648 obj = self.sa.query(UserRepoGroupToPerm)\
639 649 .filter(UserRepoGroupToPerm.user == user)\
640 650 .filter(UserRepoGroupToPerm.group == repo_group)\
641 651 .scalar()
642 652 if obj:
643 653 self.sa.delete(obj)
644 654 log.debug('Revoked perm on %s on %s', repo_group, user)
645 655 action_logger_generic(
646 656 'revoked permission from user: {} on repogroup: {}'.format(
647 657 user, repo_group), namespace='security.repogroup')
648 658
649 659 def grant_user_group_permission(self, repo_group, group_name, perm):
650 660 """
651 661 Grant permission for user group on given repository group, or update
652 662 existing one if found
653 663
654 664 :param repo_group: Instance of RepoGroup, repositories_group_id,
655 665 or repositories_group name
656 666 :param group_name: Instance of UserGroup, users_group_id,
657 667 or user group name
658 668 :param perm: Instance of Permission, or permission_name
659 669 """
660 670 repo_group = self._get_repo_group(repo_group)
661 671 group_name = self._get_user_group(group_name)
662 672 permission = self._get_perm(perm)
663 673
664 674 # check if we have that permission already
665 675 obj = self.sa.query(UserGroupRepoGroupToPerm)\
666 676 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
667 677 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
668 678 .scalar()
669 679
670 680 if obj is None:
671 681 # create new
672 682 obj = UserGroupRepoGroupToPerm()
673 683
674 684 obj.group = repo_group
675 685 obj.users_group = group_name
676 686 obj.permission = permission
677 687 self.sa.add(obj)
678 688 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
679 689 action_logger_generic(
680 690 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
681 691 perm, group_name, repo_group), namespace='security.repogroup')
682 692 return obj
683 693
684 694 def revoke_user_group_permission(self, repo_group, group_name):
685 695 """
686 696 Revoke permission for user group on given repository group
687 697
688 698 :param repo_group: Instance of RepoGroup, repositories_group_id,
689 699 or repositories_group name
690 700 :param group_name: Instance of UserGroup, users_group_id,
691 701 or user group name
692 702 """
693 703 repo_group = self._get_repo_group(repo_group)
694 704 group_name = self._get_user_group(group_name)
695 705
696 706 obj = self.sa.query(UserGroupRepoGroupToPerm)\
697 707 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
698 708 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
699 709 .scalar()
700 710 if obj:
701 711 self.sa.delete(obj)
702 712 log.debug('Revoked perm to %s on %s', repo_group, group_name)
703 713 action_logger_generic(
704 714 'revoked permission from usergroup: {} on repogroup: {}'.format(
705 715 group_name, repo_group), namespace='security.repogroup')
706 716
707 717 @classmethod
708 718 def update_commit_cache(cls, repo_groups=None):
709 719 if not repo_groups:
710 720 repo_groups = RepoGroup.getAll()
711 721 for repo_group in repo_groups:
712 722 repo_group.update_commit_cache()
713 723
714 724 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
715 725 super_user_actions=False):
716 726
717 727 from pyramid.threadlocal import get_current_request
718 728 _render = get_current_request().get_partial_renderer(
719 729 'rhodecode:templates/data_table/_dt_elements.mako')
720 730 c = _render.get_call_context()
721 731 h = _render.get_helpers()
722 732
723 733 def quick_menu(repo_group_name):
724 734 return _render('quick_repo_group_menu', repo_group_name)
725 735
726 736 def repo_group_lnk(repo_group_name):
727 737 return _render('repo_group_name', repo_group_name)
728 738
729 739 def last_change(last_change):
730 740 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
731 741 ts = time.time()
732 742 utc_offset = (datetime.datetime.fromtimestamp(ts)
733 743 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
734 744 last_change = last_change + datetime.timedelta(seconds=utc_offset)
735 745 return _render("last_change", last_change)
736 746
737 747 def desc(desc, personal):
738 748 return _render(
739 749 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
740 750
741 751 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
742 752 return _render(
743 753 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
744 754
745 755 def repo_group_name(repo_group_name, children_groups):
746 756 return _render("repo_group_name", repo_group_name, children_groups)
747 757
748 758 def user_profile(username):
749 759 return _render('user_profile', username)
750 760
751 761 repo_group_data = []
752 762 for group in repo_group_list:
753 763 # NOTE(marcink): because we use only raw column we need to load it like that
754 764 changeset_cache = RepoGroup._load_changeset_cache(
755 765 '', group._changeset_cache)
756 766 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
757 767 row = {
758 768 "menu": quick_menu(group.group_name),
759 769 "name": repo_group_lnk(group.group_name),
760 770 "name_raw": group.group_name,
761 771
762 772 "last_change": last_change(last_commit_change),
763 773
764 774 "last_changeset": "",
765 775 "last_changeset_raw": "",
766 776
767 777 "desc": desc(h.escape(group.group_description), group.personal),
768 778 "top_level_repos": 0,
769 779 "owner": user_profile(group.User.username)
770 780 }
771 781 if admin:
772 782 repo_count = group.repositories.count()
773 783 children_groups = map(
774 784 h.safe_unicode,
775 785 itertools.chain((g.name for g in group.parents),
776 786 (x.name for x in [group])))
777 787 row.update({
778 788 "action": repo_group_actions(
779 789 group.group_id, group.group_name, repo_count),
780 790 "top_level_repos": repo_count,
781 791 "name": repo_group_name(group.group_name, children_groups),
782 792
783 793 })
784 794 repo_group_data.append(row)
785 795
786 796 return repo_group_data
787 797
788 798 def get_repo_groups_data_table(
789 799 self, draw, start, limit,
790 800 search_q, order_by, order_dir,
791 801 auth_user, repo_group_id):
792 802 from rhodecode.model.scm import RepoGroupList
793 803
794 804 _perms = ['group.read', 'group.write', 'group.admin']
795 805 repo_groups = RepoGroup.query() \
796 806 .filter(RepoGroup.group_parent_id == repo_group_id) \
797 807 .all()
798 808 auth_repo_group_list = RepoGroupList(
799 809 repo_groups, perm_set=_perms,
800 810 extra_kwargs=dict(user=auth_user))
801 811
802 812 allowed_ids = [-1]
803 813 for repo_group in auth_repo_group_list:
804 814 allowed_ids.append(repo_group.group_id)
805 815
806 816 repo_groups_data_total_count = RepoGroup.query() \
807 817 .filter(RepoGroup.group_parent_id == repo_group_id) \
808 818 .filter(or_(
809 819 # generate multiple IN to fix limitation problems
810 820 *in_filter_generator(RepoGroup.group_id, allowed_ids))
811 821 ) \
812 822 .count()
813 823
814 824 base_q = Session.query(
815 825 RepoGroup.group_name,
816 826 RepoGroup.group_name_hash,
817 827 RepoGroup.group_description,
818 828 RepoGroup.group_id,
819 829 RepoGroup.personal,
820 830 RepoGroup.updated_on,
821 831 RepoGroup._changeset_cache,
822 832 User,
823 833 ) \
824 834 .filter(RepoGroup.group_parent_id == repo_group_id) \
825 835 .filter(or_(
826 836 # generate multiple IN to fix limitation problems
827 837 *in_filter_generator(RepoGroup.group_id, allowed_ids))
828 838 ) \
829 839 .join(User, User.user_id == RepoGroup.user_id) \
830 840 .group_by(RepoGroup, User)
831 841
832 842 repo_groups_data_total_filtered_count = base_q.count()
833 843
834 844 sort_defined = False
835 845
836 846 if order_by == 'group_name':
837 847 sort_col = func.lower(RepoGroup.group_name)
838 848 sort_defined = True
839 849 elif order_by == 'user_username':
840 850 sort_col = User.username
841 851 else:
842 852 sort_col = getattr(RepoGroup, order_by, None)
843 853
844 854 if sort_defined or sort_col:
845 855 if order_dir == 'asc':
846 856 sort_col = sort_col.asc()
847 857 else:
848 858 sort_col = sort_col.desc()
849 859
850 860 base_q = base_q.order_by(sort_col)
851 861 base_q = base_q.offset(start).limit(limit)
852 862
853 863 repo_group_list = base_q.all()
854 864
855 865 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
856 866 repo_group_list=repo_group_list, admin=False)
857 867
858 868 data = ({
859 869 'draw': draw,
860 870 'data': repo_groups_data,
861 871 'recordsTotal': repo_groups_data_total_count,
862 872 'recordsFiltered': repo_groups_data_total_filtered_count,
863 873 })
864 874 return data
865 875
866 876 def _get_defaults(self, repo_group_name):
867 877 repo_group = RepoGroup.get_by_group_name(repo_group_name)
868 878
869 879 if repo_group is None:
870 880 return None
871 881
872 882 defaults = repo_group.get_dict()
873 883 defaults['repo_group_name'] = repo_group.name
874 884 defaults['repo_group_description'] = repo_group.group_description
875 885 defaults['repo_group_enable_locking'] = repo_group.enable_locking
876 886
877 887 # we use -1 as this is how in HTML, we mark an empty group
878 888 defaults['repo_group'] = defaults['group_parent_id'] or -1
879 889
880 890 # fill owner
881 891 if repo_group.user:
882 892 defaults.update({'user': repo_group.user.username})
883 893 else:
884 894 replacement_user = User.get_first_super_admin().username
885 895 defaults.update({'user': replacement_user})
886 896
887 897 return defaults
@@ -1,405 +1,411 b''
1 1
2 2 /******************************************************************************
3 3 * *
4 4 * DO NOT CHANGE THIS FILE MANUALLY *
5 5 * *
6 6 * *
7 7 * This file is automatically generated when the app starts up with *
8 8 * generate_js_files = true *
9 9 * *
10 10 * To add a route here pass jsroute=True to the route definition in the app *
11 11 * *
12 12 ******************************************************************************/
13 13 function registerRCRoutes() {
14 14 // routes registration
15 pyroutes.register('admin_artifacts', '/_admin/artifacts', []);
16 pyroutes.register('admin_artifacts_data', '/_admin/artifacts-data', []);
17 pyroutes.register('admin_artifacts_delete', '/_admin/artifacts/%(uid)s/delete', ['uid']);
18 pyroutes.register('admin_artifacts_show_all', '/_admin/artifacts', []);
19 pyroutes.register('admin_artifacts_show_info', '/_admin/artifacts/%(uid)s', ['uid']);
20 pyroutes.register('admin_artifacts_update', '/_admin/artifacts/%(uid)s/update', ['uid']);
15 21 pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']);
16 22 pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []);
17 23 pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []);
18 24 pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []);
19 25 pyroutes.register('admin_home', '/_admin', []);
20 26 pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []);
21 27 pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []);
22 28 pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []);
23 29 pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []);
24 30 pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []);
25 31 pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []);
26 32 pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []);
27 33 pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []);
28 34 pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []);
29 35 pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []);
30 36 pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []);
31 37 pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []);
32 38 pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []);
33 39 pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []);
34 40 pyroutes.register('admin_settings', '/_admin/settings', []);
35 41 pyroutes.register('admin_settings_automation', '/_admin/settings/automation', []);
36 42 pyroutes.register('admin_settings_automation_update', '/_admin/settings/automation/%(entry_id)s/update', ['entry_id']);
37 43 pyroutes.register('admin_settings_email', '/_admin/settings/email', []);
38 44 pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []);
39 45 pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []);
40 46 pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']);
41 47 pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions_delete_all', []);
42 48 pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']);
43 49 pyroutes.register('admin_settings_global', '/_admin/settings/global', []);
44 50 pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []);
45 51 pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []);
46 52 pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []);
47 53 pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []);
48 54 pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []);
49 55 pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []);
50 56 pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []);
51 57 pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []);
52 58 pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []);
53 59 pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []);
54 60 pyroutes.register('admin_settings_license', '/_admin/settings/license', []);
55 61 pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []);
56 62 pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []);
57 63 pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []);
58 64 pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []);
59 65 pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []);
60 66 pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []);
61 67 pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []);
62 68 pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []);
63 69 pyroutes.register('admin_settings_scheduler_create', '/_admin/settings/scheduler/create', []);
64 70 pyroutes.register('admin_settings_scheduler_delete', '/_admin/settings/scheduler/%(schedule_id)s/delete', ['schedule_id']);
65 71 pyroutes.register('admin_settings_scheduler_edit', '/_admin/settings/scheduler/%(schedule_id)s', ['schedule_id']);
66 72 pyroutes.register('admin_settings_scheduler_execute', '/_admin/settings/scheduler/%(schedule_id)s/execute', ['schedule_id']);
67 73 pyroutes.register('admin_settings_scheduler_new', '/_admin/settings/scheduler/new', []);
68 74 pyroutes.register('admin_settings_scheduler_show_all', '/_admin/settings/scheduler', []);
69 75 pyroutes.register('admin_settings_scheduler_show_tasks', '/_admin/settings/scheduler/_tasks', []);
70 76 pyroutes.register('admin_settings_scheduler_update', '/_admin/settings/scheduler/%(schedule_id)s/update', ['schedule_id']);
71 77 pyroutes.register('admin_settings_search', '/_admin/settings/search', []);
72 78 pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []);
73 79 pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []);
74 80 pyroutes.register('admin_settings_system', '/_admin/settings/system', []);
75 81 pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []);
76 82 pyroutes.register('admin_settings_update', '/_admin/settings/update', []);
77 83 pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []);
78 84 pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []);
79 85 pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []);
80 86 pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []);
81 87 pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []);
82 88 pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []);
83 89 pyroutes.register('apiv2', '/_admin/api', []);
84 90 pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']);
85 91 pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']);
86 92 pyroutes.register('auth_home', '/_admin/auth*traverse', []);
87 93 pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']);
88 94 pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']);
89 95 pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []);
90 96 pyroutes.register('channelstream_proxy', '/_channelstream', []);
91 97 pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []);
92 98 pyroutes.register('commit_draft_comments_submit', '/%(repo_name)s/changeset/%(commit_id)s/draft_comments_submit', ['repo_name', 'commit_id']);
93 99 pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']);
94 100 pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']);
95 101 pyroutes.register('debug_style_home', '/_admin/debug_style', []);
96 102 pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']);
97 103 pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']);
98 104 pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']);
99 105 pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']);
100 106 pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']);
101 107 pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']);
102 108 pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']);
103 109 pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']);
104 110 pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']);
105 111 pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']);
106 112 pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']);
107 113 pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']);
108 114 pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']);
109 115 pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']);
110 116 pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']);
111 117 pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']);
112 118 pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']);
113 119 pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']);
114 120 pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']);
115 121 pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']);
116 122 pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']);
117 123 pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']);
118 124 pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']);
119 125 pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']);
120 126 pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']);
121 127 pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']);
122 128 pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']);
123 129 pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']);
124 130 pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']);
125 131 pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']);
126 132 pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']);
127 133 pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']);
128 134 pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']);
129 135 pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']);
130 136 pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']);
131 137 pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']);
132 138 pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']);
133 139 pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']);
134 140 pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']);
135 141 pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']);
136 142 pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']);
137 143 pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']);
138 144 pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']);
139 145 pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']);
140 146 pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']);
141 147 pyroutes.register('edit_user_auth_tokens_view', '/_admin/users/%(user_id)s/edit/auth_tokens/view', ['user_id']);
142 148 pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']);
143 149 pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']);
144 150 pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']);
145 151 pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']);
146 152 pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']);
147 153 pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']);
148 154 pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']);
149 155 pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']);
150 156 pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']);
151 157 pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']);
152 158 pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']);
153 159 pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']);
154 160 pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']);
155 161 pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']);
156 162 pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']);
157 163 pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']);
158 164 pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']);
159 165 pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']);
160 166 pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']);
161 167 pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']);
162 168 pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']);
163 169 pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']);
164 170 pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']);
165 171 pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']);
166 172 pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']);
167 173 pyroutes.register('favicon', '/favicon.ico', []);
168 174 pyroutes.register('file_preview', '/_file_preview', []);
169 175 pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']);
170 176 pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']);
171 177 pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']);
172 178 pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']);
173 179 pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']);
174 180 pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']);
175 181 pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/rev/%(revision)s', ['gist_id', 'revision']);
176 182 pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']);
177 183 pyroutes.register('gists_create', '/_admin/gists/create', []);
178 184 pyroutes.register('gists_new', '/_admin/gists/new', []);
179 185 pyroutes.register('gists_show', '/_admin/gists', []);
180 186 pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']);
181 187 pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']);
182 188 pyroutes.register('global_integrations_home', '/_admin/integrations', []);
183 189 pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']);
184 190 pyroutes.register('global_integrations_new', '/_admin/integrations/new', []);
185 191 pyroutes.register('goto_switcher_data', '/_goto_data', []);
186 192 pyroutes.register('home', '/', []);
187 193 pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']);
188 194 pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']);
189 195 pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']);
190 196 pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']);
191 197 pyroutes.register('hovercard_username', '/_hovercard/username/%(username)s', ['username']);
192 198 pyroutes.register('journal', '/_admin/journal', []);
193 199 pyroutes.register('journal_atom', '/_admin/journal/atom', []);
194 200 pyroutes.register('journal_public', '/_admin/public_journal', []);
195 201 pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []);
196 202 pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []);
197 203 pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []);
198 204 pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []);
199 205 pyroutes.register('journal_rss', '/_admin/journal/rss', []);
200 206 pyroutes.register('login', '/_admin/login', []);
201 207 pyroutes.register('logout', '/_admin/logout', []);
202 208 pyroutes.register('main_page_repo_groups_data', '/_home_repo_groups', []);
203 209 pyroutes.register('main_page_repos_data', '/_home_repos', []);
204 210 pyroutes.register('markup_preview', '/_markup_preview', []);
205 211 pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []);
206 212 pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []);
207 213 pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []);
208 214 pyroutes.register('my_account_auth_tokens_view', '/_admin/my_account/auth_tokens/view', []);
209 215 pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []);
210 216 pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []);
211 217 pyroutes.register('my_account_edit', '/_admin/my_account/edit', []);
212 218 pyroutes.register('my_account_emails', '/_admin/my_account/emails', []);
213 219 pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []);
214 220 pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []);
215 221 pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []);
216 222 pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []);
217 223 pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']);
218 224 pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []);
219 225 pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []);
220 226 pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []);
221 227 pyroutes.register('my_account_password', '/_admin/my_account/password', []);
222 228 pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []);
223 229 pyroutes.register('my_account_perms', '/_admin/my_account/perms', []);
224 230 pyroutes.register('my_account_profile', '/_admin/my_account/profile', []);
225 231 pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []);
226 232 pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []);
227 233 pyroutes.register('my_account_repos', '/_admin/my_account/repos', []);
228 234 pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []);
229 235 pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []);
230 236 pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []);
231 237 pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []);
232 238 pyroutes.register('my_account_update', '/_admin/my_account/update', []);
233 239 pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []);
234 240 pyroutes.register('my_account_watched', '/_admin/my_account/watched', []);
235 241 pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']);
236 242 pyroutes.register('notifications_mark_all_read', '/_admin/notifications_mark_all_read', []);
237 243 pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']);
238 244 pyroutes.register('notifications_show_all', '/_admin/notifications', []);
239 245 pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']);
240 246 pyroutes.register('ops_error_test', '/_admin/ops/error', []);
241 247 pyroutes.register('ops_ping', '/_admin/ops/ping', []);
242 248 pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []);
243 249 pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']);
244 250 pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']);
245 251 pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']);
246 252 pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']);
247 253 pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']);
248 254 pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']);
249 255 pyroutes.register('pullrequest_comment_edit', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/edit', ['repo_name', 'pull_request_id', 'comment_id']);
250 256 pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']);
251 257 pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']);
252 258 pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']);
253 259 pyroutes.register('pullrequest_draft_comments_submit', '/%(repo_name)s/pull-request/%(pull_request_id)s/draft_comments_submit', ['repo_name', 'pull_request_id']);
254 260 pyroutes.register('pullrequest_drafts', '/%(repo_name)s/pull-request/%(pull_request_id)s/drafts', ['repo_name', 'pull_request_id']);
255 261 pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']);
256 262 pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']);
257 263 pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']);
258 264 pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']);
259 265 pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']);
260 266 pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']);
261 267 pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']);
262 268 pyroutes.register('pullrequest_todos', '/%(repo_name)s/pull-request/%(pull_request_id)s/todos', ['repo_name', 'pull_request_id']);
263 269 pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']);
264 270 pyroutes.register('register', '/_admin/register', []);
265 271 pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']);
266 272 pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']);
267 273 pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']);
268 274 pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']);
269 275 pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']);
270 276 pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']);
271 277 pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']);
272 278 pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']);
273 279 pyroutes.register('repo_artifacts_stream_script', '/_file_store/stream-upload-script', []);
274 280 pyroutes.register('repo_artifacts_stream_store', '/_file_store/stream-upload', []);
275 281 pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']);
276 282 pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']);
277 283 pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']);
278 284 pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']);
279 285 pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
280 286 pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']);
281 287 pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']);
282 288 pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']);
283 289 pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']);
284 290 pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']);
285 291 pyroutes.register('repo_commit_comment_edit', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/edit', ['repo_name', 'commit_id', 'comment_id']);
286 292 pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_history_id)s/history_view', ['repo_name', 'commit_id', 'comment_history_id']);
287 293 pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']);
288 294 pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']);
289 295 pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']);
290 296 pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']);
291 297 pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']);
292 298 pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']);
293 299 pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']);
294 300 pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']);
295 301 pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']);
296 302 pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
297 303 pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
298 304 pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']);
299 305 pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']);
300 306 pyroutes.register('repo_create', '/_admin/repos/create', []);
301 307 pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']);
302 308 pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']);
303 309 pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']);
304 310 pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']);
305 311 pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
306 312 pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
307 313 pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
308 314 pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
309 315 pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
310 316 pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
311 317 pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
312 318 pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
313 319 pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']);
314 320 pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']);
315 321 pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
316 322 pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
317 323 pyroutes.register('repo_files_check_head', '/%(repo_name)s/check_head/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
318 324 pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
319 325 pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
320 326 pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']);
321 327 pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']);
322 328 pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
323 329 pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
324 330 pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
325 331 pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
326 332 pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
327 333 pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']);
328 334 pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']);
329 335 pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']);
330 336 pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']);
331 337 pyroutes.register('repo_group_create', '/_admin/repo_group/create', []);
332 338 pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']);
333 339 pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']);
334 340 pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']);
335 341 pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']);
336 342 pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']);
337 343 pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']);
338 344 pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']);
339 345 pyroutes.register('repo_group_list_data', '/_repo_groups', []);
340 346 pyroutes.register('repo_group_new', '/_admin/repo_group/new', []);
341 347 pyroutes.register('repo_groups', '/_admin/repo_groups', []);
342 348 pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []);
343 349 pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']);
344 350 pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']);
345 351 pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']);
346 352 pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']);
347 353 pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']);
348 354 pyroutes.register('repo_list_data', '/_repos', []);
349 355 pyroutes.register('repo_new', '/_admin/repos/new', []);
350 356 pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']);
351 357 pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']);
352 358 pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']);
353 359 pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']);
354 360 pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']);
355 361 pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']);
356 362 pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']);
357 363 pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']);
358 364 pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']);
359 365 pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']);
360 366 pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']);
361 367 pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']);
362 368 pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']);
363 369 pyroutes.register('repos', '/_admin/repos', []);
364 370 pyroutes.register('repos_data', '/_admin/repos_data', []);
365 371 pyroutes.register('reset_password', '/_admin/password_reset', []);
366 372 pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []);
367 373 pyroutes.register('robots', '/robots.txt', []);
368 374 pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']);
369 375 pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']);
370 376 pyroutes.register('search', '/_admin/search', []);
371 377 pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']);
372 378 pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']);
373 379 pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']);
374 380 pyroutes.register('store_user_session_value', '/_store_session_attr', []);
375 381 pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']);
376 382 pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']);
377 383 pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']);
378 384 pyroutes.register('toggle_following', '/_admin/toggle_following', []);
379 385 pyroutes.register('upload_file', '/_file_store/upload', []);
380 386 pyroutes.register('user_autocomplete_data', '/_users', []);
381 387 pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']);
382 388 pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']);
383 389 pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']);
384 390 pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']);
385 391 pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']);
386 392 pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']);
387 393 pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']);
388 394 pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']);
389 395 pyroutes.register('user_group_autocomplete_data', '/_user_groups', []);
390 396 pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']);
391 397 pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']);
392 398 pyroutes.register('user_groups', '/_admin/user_groups', []);
393 399 pyroutes.register('user_groups_create', '/_admin/user_groups/create', []);
394 400 pyroutes.register('user_groups_data', '/_admin/user_groups_data', []);
395 401 pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']);
396 402 pyroutes.register('user_groups_new', '/_admin/user_groups/new', []);
397 403 pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']);
398 404 pyroutes.register('user_notice_dismiss', '/_admin/users/%(user_id)s/notice_dismiss', ['user_id']);
399 405 pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']);
400 406 pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']);
401 407 pyroutes.register('users', '/_admin/users', []);
402 408 pyroutes.register('users_create', '/_admin/users/create', []);
403 409 pyroutes.register('users_data', '/_admin/users_data', []);
404 410 pyroutes.register('users_new', '/_admin/users/new', []);
405 411 }
@@ -1,1639 +1,1639 b''
1 1 // # Copyright (C) 2010-2020 RhodeCode GmbH
2 2 // #
3 3 // # This program is free software: you can redistribute it and/or modify
4 4 // # it under the terms of the GNU Affero General Public License, version 3
5 5 // # (only), as published by the Free Software Foundation.
6 6 // #
7 7 // # This program is distributed in the hope that it will be useful,
8 8 // # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 // # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 // # GNU General Public License for more details.
11 11 // #
12 12 // # You should have received a copy of the GNU Affero General Public License
13 13 // # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 // #
15 15 // # This program is dual-licensed. If you wish to learn more about the
16 16 // # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 // # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
var firefoxAnchorFix = function() {
    // Workaround for Firefox: when inline comments are injected into the
    // page, Firefox mishandles jumps to anchor links. Re-assigning the
    // (unchanged) href forces the browser to re-evaluate the anchor.
    var currentHref = location.href;
    if (currentHref.indexOf('#') !== -1) {
        location.href = currentHref + '';
    }
};
27 27
28 28
var linkifyComments = function(comments) {
    // Point the inline-comments counter link at the first rendered
    // comment, so clicking it jumps straight to that comment's anchor.
    var targetCommentId = comments ? $(comments[0]).data('comment-id') : null;

    if (targetCommentId) {
        $('#inline-comments-counter').attr('href', '#comment-' + targetCommentId);
    }
};
39 39
40 40
var bindToggleButtons = function() {
    // Wire up each comment-toggle control: clicking it shows/hides the
    // inline-comment rows that follow it (everything up to the next
    // 'tr.line' row belongs to this toggle).
    $('.comment-toggle').on('click', function() {
        var commentRows = $(this).parent().nextUntil('tr.line');
        commentRows.toggle('inline-comments');
    });
};
46 46
47 47
var _submitAjaxPOST = function(url, postData, successHandler, failHandler) {
    // Serialize postData and POST it via AJAX. The X-PARTIAL-XHR header
    // marks the request as a partial-page XHR for the server side.
    // Returns the jqXHR so callers can chain additional handlers.
    var onFailure = failHandler || function() {};
    var serialized = toQueryString(postData);

    var request = $.ajax({
        url: url,
        type: 'POST',
        data: serialized,
        headers: {'X-PARTIAL-XHR': true}
    });

    request.done(function (data) {
        successHandler(data);
    });
    request.fail(function (data, textStatus, errorThrown) {
        onFailure(data, textStatus, errorThrown)
    });

    return request;
};
65 65
66 66
67 67 /* Comment form for main and inline comments */
68 68 (function(mod) {
69 69
70 70 if (typeof exports == "object" && typeof module == "object") {
71 71 // CommonJS
72 72 module.exports = mod();
73 73 }
74 74 else {
75 75 // Plain browser env
76 76 (this || window).CommentForm = mod();
77 77 }
78 78
79 79 })(function() {
80 80 "use strict";
81 81
82 82 function CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId, edit, comment_id) {
83 83
84 84 if (!(this instanceof CommentForm)) {
85 85 return new CommentForm(formElement, commitId, pullRequestId, lineNo, initAutocompleteActions, resolvesCommentId, edit, comment_id);
86 86 }
87 87
88 88 // bind the element instance to our Form
89 89 $(formElement).get(0).CommentForm = this;
90 90
91 91 this.withLineNo = function(selector) {
92 92 var lineNo = this.lineNo;
93 93 if (lineNo === undefined) {
94 94 return selector
95 95 } else {
96 96 return selector + '_' + lineNo;
97 97 }
98 98 };
99 99
100 100 this.commitId = commitId;
101 101 this.pullRequestId = pullRequestId;
102 102 this.lineNo = lineNo;
103 103 this.initAutocompleteActions = initAutocompleteActions;
104 104
105 105 this.previewButton = this.withLineNo('#preview-btn');
106 106 this.previewContainer = this.withLineNo('#preview-container');
107 107
108 108 this.previewBoxSelector = this.withLineNo('#preview-box');
109 109
110 110 this.editButton = this.withLineNo('#edit-btn');
111 111 this.editContainer = this.withLineNo('#edit-container');
112 112 this.cancelButton = this.withLineNo('#cancel-btn');
113 113 this.commentType = this.withLineNo('#comment_type');
114 114
115 115 this.resolvesId = null;
116 116 this.resolvesActionId = null;
117 117
118 118 this.closesPr = '#close_pull_request';
119 119
120 120 this.cmBox = this.withLineNo('#text');
121 121 this.cm = initCommentBoxCodeMirror(this, this.cmBox, this.initAutocompleteActions);
122 122
123 123 this.statusChange = this.withLineNo('#change_status');
124 124
125 125 this.submitForm = formElement;
126 126
127 127 this.submitButton = $(this.submitForm).find('.submit-comment-action');
128 128 this.submitButtonText = this.submitButton.val();
129 129
130 130 this.submitDraftButton = $(this.submitForm).find('.submit-draft-action');
131 131 this.submitDraftButtonText = this.submitDraftButton.val();
132 132
133 133 this.previewUrl = pyroutes.url('repo_commit_comment_preview',
134 134 {'repo_name': templateContext.repo_name,
135 135 'commit_id': templateContext.commit_data.commit_id});
136 136
137 137 if (edit){
138 138 this.submitDraftButton.hide();
139 139 this.submitButtonText = _gettext('Update Comment');
140 140 $(this.commentType).prop('disabled', true);
141 141 $(this.commentType).addClass('disabled');
142 142 var editInfo =
143 143 '';
144 144 $(editInfo).insertBefore($(this.editButton).parent());
145 145 }
146 146
147 147 if (resolvesCommentId){
148 148 this.resolvesId = '#resolve_comment_{0}'.format(resolvesCommentId);
149 149 this.resolvesActionId = '#resolve_comment_action_{0}'.format(resolvesCommentId);
150 150 $(this.commentType).prop('disabled', true);
151 151 $(this.commentType).addClass('disabled');
152 152
153 153 // disable select
154 154 setTimeout(function() {
155 155 $(self.statusChange).select2('readonly', true);
156 156 }, 10);
157 157
158 158 var resolvedInfo = (
159 159 '<li class="resolve-action">' +
160 160 '<input type="hidden" id="resolve_comment_{0}" name="resolve_comment_{0}" value="{0}">' +
161 161 '<button id="resolve_comment_action_{0}" class="resolve-text btn btn-sm" onclick="return Rhodecode.comments.submitResolution({0})">{1} #{0}</button>' +
162 162 '</li>'
163 163 ).format(resolvesCommentId, _gettext('resolve comment'));
164 164 $(resolvedInfo).insertAfter($(this.commentType).parent());
165 165 }
166 166
167 167 // based on commitId, or pullRequestId decide where do we submit
168 168 // out data
169 169 if (this.commitId){
170 170 var pyurl = 'repo_commit_comment_create';
171 171 if(edit){
172 172 pyurl = 'repo_commit_comment_edit';
173 173 }
174 174 this.submitUrl = pyroutes.url(pyurl,
175 175 {'repo_name': templateContext.repo_name,
176 176 'commit_id': this.commitId,
177 177 'comment_id': comment_id});
178 178 this.selfUrl = pyroutes.url('repo_commit',
179 179 {'repo_name': templateContext.repo_name,
180 180 'commit_id': this.commitId});
181 181
182 182 } else if (this.pullRequestId) {
183 183 var pyurl = 'pullrequest_comment_create';
184 184 if(edit){
185 185 pyurl = 'pullrequest_comment_edit';
186 186 }
187 187 this.submitUrl = pyroutes.url(pyurl,
188 188 {'repo_name': templateContext.repo_name,
189 189 'pull_request_id': this.pullRequestId,
190 190 'comment_id': comment_id});
191 191 this.selfUrl = pyroutes.url('pullrequest_show',
192 192 {'repo_name': templateContext.repo_name,
193 193 'pull_request_id': this.pullRequestId});
194 194
195 195 } else {
196 196 throw new Error(
197 197 'CommentForm requires pullRequestId, or commitId to be specified.')
198 198 }
199 199
200 200 // FUNCTIONS and helpers
201 201 var self = this;
202 202
203 203 this.isInline = function(){
204 204 return this.lineNo && this.lineNo != 'general';
205 205 };
206 206
207 207 this.getCmInstance = function(){
208 208 return this.cm
209 209 };
210 210
211 211 this.setPlaceholder = function(placeholder) {
212 212 var cm = this.getCmInstance();
213 213 if (cm){
214 214 cm.setOption('placeholder', placeholder);
215 215 }
216 216 };
217 217
218 218 this.getCommentStatus = function() {
219 219 return $(this.submitForm).find(this.statusChange).val();
220 220 };
221 221
222 222 this.getCommentType = function() {
223 223 return $(this.submitForm).find(this.commentType).val();
224 224 };
225 225
226 226 this.getDraftState = function () {
227 227 var submitterElem = $(this.submitForm).find('input[type="submit"].submitter');
228 228 var data = $(submitterElem).data('isDraft');
229 229 return data
230 230 }
231 231
232 232 this.getResolvesId = function() {
233 233 return $(this.submitForm).find(this.resolvesId).val() || null;
234 234 };
235 235
236 236 this.getClosePr = function() {
237 237 return $(this.submitForm).find(this.closesPr).val() || null;
238 238 };
239 239
240 240 this.markCommentResolved = function(resolvedCommentId){
241 241 Rhodecode.comments.markCommentResolved(resolvedCommentId)
242 242 };
243 243
244 244 this.isAllowedToSubmit = function() {
245 245 var commentDisabled = $(this.submitButton).prop('disabled');
246 246 var draftDisabled = $(this.submitDraftButton).prop('disabled');
247 247 return !commentDisabled && !draftDisabled;
248 248 };
249 249
250 250 this.initStatusChangeSelector = function(){
251 251 var formatChangeStatus = function(state, escapeMarkup) {
252 252 var originalOption = state.element;
253 253 var tmpl = '<i class="icon-circle review-status-{0}"></i><span>{1}</span>'.format($(originalOption).data('status'), escapeMarkup(state.text));
254 254 return tmpl
255 255 };
256 256 var formatResult = function(result, container, query, escapeMarkup) {
257 257 return formatChangeStatus(result, escapeMarkup);
258 258 };
259 259
260 260 var formatSelection = function(data, container, escapeMarkup) {
261 261 return formatChangeStatus(data, escapeMarkup);
262 262 };
263 263
264 264 $(this.submitForm).find(this.statusChange).select2({
265 265 placeholder: _gettext('Status Review'),
266 266 formatResult: formatResult,
267 267 formatSelection: formatSelection,
268 268 containerCssClass: "drop-menu status_box_menu",
269 269 dropdownCssClass: "drop-menu-dropdown",
270 270 dropdownAutoWidth: true,
271 271 minimumResultsForSearch: -1
272 272 });
273 273
274 274 $(this.submitForm).find(this.statusChange).on('change', function() {
275 275 var status = self.getCommentStatus();
276 276
277 277 if (status && !self.isInline()) {
278 278 $(self.submitButton).prop('disabled', false);
279 279 $(self.submitDraftButton).prop('disabled', false);
280 280 }
281 281
282 282 var placeholderText = _gettext('Comment text will be set automatically based on currently selected status ({0}) ...').format(status);
283 283 self.setPlaceholder(placeholderText)
284 284 })
285 285 };
286 286
    // Reset the comment form into its original state; `content` optionally
    // pre-fills the editor (used to preserve text after a failed submit).
    this.resetCommentFormState = function(content) {
        content = content || '';

        // switch back to the edit pane, hide the preview pane
        $(this.editContainer).show();
        $(this.editButton).parent().addClass('active');

        $(this.previewContainer).hide();
        $(this.previewButton).parent().removeClass('active');

        this.setActionButtonsDisabled(true);
        self.cm.setValue(content);
        self.cm.setOption("readOnly", false);

        // NOTE(review): resolvesId is a selector string, so this only checks
        // that it was configured, not that it matches any element — confirm.
        if (this.resolvesId) {
            // destroy the resolve action
            $(this.resolvesId).parent().remove();
        }
        // reset closingPR flag
        $('.close-pr-input').remove();

        $(this.statusChange).select2('readonly', false);
    };
310 310
311 311 this.globalSubmitSuccessCallback = function(comment){
312 312 // default behaviour is to call GLOBAL hook, if it's registered.
313 313 if (window.commentFormGlobalSubmitSuccessCallback !== undefined){
314 314 commentFormGlobalSubmitSuccessCallback(comment);
315 315 }
316 316 };
317 317
318 318 this.submitAjaxPOST = function(url, postData, successHandler, failHandler) {
319 319 return _submitAjaxPOST(url, postData, successHandler, failHandler);
320 320 };
321 321
322 322 // overwrite a submitHandler, we need to do it for inline comments
323 323 this.setHandleFormSubmit = function(callback) {
324 324 this.handleFormSubmit = callback;
325 325 };
326 326
327 327 // overwrite a submitSuccessHandler
328 328 this.setGlobalSubmitSuccessCallback = function(callback) {
329 329 this.globalSubmitSuccessCallback = callback;
330 330 };
331 331
    // Default submit handler for the main (general) comment form: collects
    // form state, POSTs it, and injects the new comment on success.
    this.handleFormSubmit = function() {
        var text = self.cm.getValue();
        var status = self.getCommentStatus();
        var commentType = self.getCommentType();
        var isDraft = self.getDraftState();
        var resolvesCommentId = self.getResolvesId();
        var closePullRequest = self.getClosePr();

        // empty text is only allowed when a status change is attached
        if (text === "" && !status) {
            return;
        }

        // lock the form while the request is in flight
        var excludeCancelBtn = false;
        var submitEvent = true;
        self.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent);
        self.cm.setOption("readOnly", true);

        // NOTE(review): isDraft is read but not posted here — presumably the
        // backend infers the draft state through another channel; confirm.
        var postData = {
            'text': text,
            'changeset_status': status,
            'comment_type': commentType,
            'csrf_token': CSRF_TOKEN
        };

        if (resolvesCommentId) {
            postData['resolves_comment_id'] = resolvesCommentId;
        }

        if (closePullRequest) {
            postData['close_pull_request'] = true;
        }

        // submitSuccess for general comments
        var submitSuccessCallback = function(json_data) {
            // reload page if we change status for single commit.
            if (status && self.commitId) {
                location.reload(true);
            } else {
                // inject newly created comments, json_data is {<comment_id>: {}}
                Rhodecode.comments.attachGeneralComment(json_data)

                self.resetCommentFormState();
                timeagoActivate();
                tooltipActivate();

                // mark visually which comment was resolved
                if (resolvesCommentId) {
                    self.markCommentResolved(resolvesCommentId);
                }
            }

            // run global callback on submit
            self.globalSubmitSuccessCallback({draft: isDraft, comment_id: comment_id});

        };
        // on failure: surface the error and restore the form with the text kept
        var submitFailCallback = function(jqXHR, textStatus, errorThrown) {
            var prefix = "Error while submitting comment.\n"
            var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
            ajaxErrorSwal(message);
            self.resetCommentFormState(text);
        };
        self.submitAjaxPOST(
            self.submitUrl, postData, submitSuccessCallback, submitFailCallback);
    };
397 397
398 398 this.previewSuccessCallback = function(o) {
399 399 $(self.previewBoxSelector).html(o);
400 400 $(self.previewBoxSelector).removeClass('unloaded');
401 401
402 402 // swap buttons, making preview active
403 403 $(self.previewButton).parent().addClass('active');
404 404 $(self.editButton).parent().removeClass('active');
405 405
406 406 // unlock buttons
407 407 self.setActionButtonsDisabled(false);
408 408 };
409 409
    // Enable/disable the form action buttons.
    //   state            - true disables, false enables
    //   excludeCancelBtn - when true, leave the cancel button untouched
    //   submitEvent      - true while an actual submission is in flight;
    //                      forces the disable and swaps button captions
    this.setActionButtonsDisabled = function(state, excludeCancelBtn, submitEvent) {
        excludeCancelBtn = excludeCancelBtn || false;
        submitEvent = submitEvent || false;

        $(this.editButton).prop('disabled', state);
        $(this.previewButton).prop('disabled', state);

        if (!excludeCancelBtn) {
            $(this.cancelButton).prop('disabled', state);
        }

        var submitState = state;
        if (!submitEvent && this.getCommentStatus() && !self.isInline()) {
            // if the value of commit review status is set, we allow
            // submit button, but only on Main form, isInline means inline
            submitState = false
        }

        $(this.submitButton).prop('disabled', submitState);
        $(this.submitDraftButton).prop('disabled', submitState);

        if (submitEvent) {
            var isDraft = self.getDraftState();

            // show a progress caption on whichever button performs the submit
            if (isDraft) {
                $(this.submitDraftButton).val(_gettext('Saving Draft...'));
            } else {
                $(this.submitButton).val(_gettext('Submitting...'));
            }

        } else {
            // restore the original button captions
            $(this.submitButton).val(this.submitButtonText);
            $(this.submitDraftButton).val(this.submitDraftButtonText);
        }

    };
446 446
    // lock preview/edit/submit buttons on load, but exclude cancel button
    var excludeCancelBtn = true;
    this.setActionButtonsDisabled(true, excludeCancelBtn);

    // anonymous users don't have access to initialized CM instance
    if (this.cm !== undefined){
        // keep action buttons locked while the editor is empty
        this.cm.on('change', function(cMirror) {
            if (cMirror.getValue() === "") {
                self.setActionButtonsDisabled(true, excludeCancelBtn)
            } else {
                self.setActionButtonsDisabled(false, excludeCancelBtn)
            }
        });
    }
461 461
462 462 $(this.editButton).on('click', function(e) {
463 463 e.preventDefault();
464 464
465 465 $(self.previewButton).parent().removeClass('active');
466 466 $(self.previewContainer).hide();
467 467
468 468 $(self.editButton).parent().addClass('active');
469 469 $(self.editContainer).show();
470 470
471 471 });
472 472
    // Request a server-side render of the current editor text and show
    // the result in the preview pane.
    $(this.previewButton).on('click', function(e) {
        e.preventDefault();
        var text = self.cm.getValue();

        // nothing to preview
        if (text === "") {
            return;
        }

        var postData = {
            'text': text,
            'renderer': templateContext.visual.default_renderer,
            'csrf_token': CSRF_TOKEN
        };

        // lock ALL buttons on preview
        self.setActionButtonsDisabled(true);

        $(self.previewBoxSelector).addClass('unloaded');
        $(self.previewBoxSelector).html(_gettext('Loading ...'));

        $(self.editContainer).hide();
        $(self.previewContainer).show();

        // by default we reset state of comment preserving the text
        var previewFailCallback = function(jqXHR, textStatus, errorThrown) {
            var prefix = "Error while preview of comment.\n"
            var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
            ajaxErrorSwal(message);

            self.resetCommentFormState(text)
        };
        self.submitAjaxPOST(
            self.previewUrl, postData, self.previewSuccessCallback,
            previewFailCallback);

        // mark the preview tab as the active one
        $(self.previewButton).parent().addClass('active');
        $(self.editButton).parent().removeClass('active');
    });
511 511
512 512 $(this.submitForm).submit(function(e) {
513 513 e.preventDefault();
514 514 var allowedToSubmit = self.isAllowedToSubmit();
515 515 if (!allowedToSubmit){
516 516 return false;
517 517 }
518 518
519 519 self.handleFormSubmit();
520 520 });
521 521
522 522 }
523 523
524 524 return CommentForm;
525 525 });
526 526
/* Build the select2 "edited versions" dropdown for a comment.
 * `initialData` is select2 preload data ({results: [...]}) where each entry
 * carries version/author metadata plus an `action` callback to run when
 * that version is picked. */
var initVersionSelector = function (selector, initialData) {

    // render one version entry via the shared commentVersion template
    var renderVersion = function (result, container, query, escapeMarkup) {
        return renderTemplate('commentVersion', {
            show_disabled: true,
            version: result.comment_version,
            user_name: result.comment_author_username,
            gravatar_url: result.comment_author_gravatar,
            size: 16,
            timeago_component: result.comment_created_on,
        });
    };

    $(selector).select2({
        placeholder: "Edited",
        containerCssClass: "drop-menu-comment-history",
        dropdownCssClass: "drop-menu-dropdown",
        dropdownAutoWidth: true,
        minimumResultsForSearch: -1,
        data: initialData,
        formatResult: renderVersion,
    });

    $(selector).on('select2-selecting', function (e) {
        // hide the mask, as we call preventDefault() below
        $("#select2-drop-mask").click();
        e.preventDefault();
        // run the action attached to the chosen version entry
        e.choice.action();
    });

    // re-run timeago on the freshly rendered dropdown entries
    $(selector).on("select2-open", function () {
        timeagoActivate();
    });
};
563 563
564 564 /* comments controller */
565 565 var CommentsController = function() {
566 566 var mainComment = '#text';
567 567 var self = this;
568 568
569 569 this.showVersion = function (comment_id, comment_history_id) {
570 570
571 571 var historyViewUrl = pyroutes.url(
572 572 'repo_commit_comment_history_view',
573 573 {
574 574 'repo_name': templateContext.repo_name,
575 575 'commit_id': comment_id,
576 576 'comment_history_id': comment_history_id,
577 577 }
578 578 );
579 579 successRenderCommit = function (data) {
580 580 SwalNoAnimation.fire({
581 581 html: data,
582 582 title: '',
583 583 });
584 584 };
585 585 failRenderCommit = function () {
586 586 SwalNoAnimation.fire({
587 587 html: 'Error while loading comment history',
588 588 title: '',
589 589 });
590 590 };
591 591 _submitAjaxPOST(
592 592 historyViewUrl, {'csrf_token': CSRF_TOKEN},
593 593 successRenderCommit,
594 594 failRenderCommit
595 595 );
596 596 };
597 597
598 598 this.getLineNumber = function(node) {
599 599 var $node = $(node);
600 600 var lineNo = $node.closest('td').attr('data-line-no');
601 601 if (lineNo === undefined && $node.data('commentInline')){
602 602 lineNo = $node.data('commentLineNo')
603 603 }
604 604
605 605 return lineNo
606 606 };
607 607
608 608 this.scrollToComment = function(node, offset, outdated) {
609 609 if (offset === undefined) {
610 610 offset = 0;
611 611 }
612 612 var outdated = outdated || false;
613 613 var klass = outdated ? 'div.comment-outdated' : 'div.comment-current';
614 614
615 615 if (!node) {
616 616 node = $('.comment-selected');
617 617 if (!node.length) {
618 618 node = $('comment-current')
619 619 }
620 620 }
621 621
622 622 $wrapper = $(node).closest('div.comment');
623 623
624 624 // show hidden comment when referenced.
625 625 if (!$wrapper.is(':visible')){
626 626 $wrapper.show();
627 627 }
628 628
629 629 $comment = $(node).closest(klass);
630 630 $comments = $(klass);
631 631
632 632 $('.comment-selected').removeClass('comment-selected');
633 633
634 634 var nextIdx = $(klass).index($comment) + offset;
635 635 if (nextIdx >= $comments.length) {
636 636 nextIdx = 0;
637 637 }
638 638 var $next = $(klass).eq(nextIdx);
639 639
640 640 var $cb = $next.closest('.cb');
641 641 $cb.removeClass('cb-collapsed');
642 642
643 643 var $filediffCollapseState = $cb.closest('.filediff').prev();
644 644 $filediffCollapseState.prop('checked', false);
645 645 $next.addClass('comment-selected');
646 646 scrollToElement($next);
647 647 return false;
648 648 };
649 649
650 650 this.nextComment = function(node) {
651 651 return self.scrollToComment(node, 1);
652 652 };
653 653
654 654 this.prevComment = function(node) {
655 655 return self.scrollToComment(node, -1);
656 656 };
657 657
658 658 this.nextOutdatedComment = function(node) {
659 659 return self.scrollToComment(node, 1, true);
660 660 };
661 661
662 662 this.prevOutdatedComment = function(node) {
663 663 return self.scrollToComment(node, -1, true);
664 664 };
665 665
    // Cancel an open (possibly edit-mode) comment form, restoring the
    // previously hidden comment it replaced, and remove the form.
    this.cancelComment = function (node) {
        var $node = $(node);
        // NOTE(review): `this` here is the CommentsController instance, so
        // $(this).attr('edit') is always undefined and `edit` is always
        // falsy — presumably this was meant to read an attribute off $node;
        // confirm before fixing.
        var edit = $(this).attr('edit');
        var $inlineComments = $node.closest('div.inline-comments');

        if (edit) {
            var $general_comments = null;
            if (!$inlineComments.length) {
                $general_comments = $('#comments');
                var $comment = $general_comments.parent().find('div.comment:hidden');
                // show hidden general comment form
                $('#cb-comment-general-form-placeholder').show();
            } else {
                var $comment = $inlineComments.find('div.comment:hidden');
            }
            $comment.show();
        }
        var $replyWrapper = $node.closest('.comment-inline-form').closest('.reply-thread-container-wrapper')
        $replyWrapper.removeClass('comment-form-active');

        // hide the reply wrapper again when the thread ends in an
        // outdated comment
        var lastComment = $inlineComments.find('.comment-inline').last();
        if ($(lastComment).hasClass('comment-outdated')) {
            $replyWrapper.hide();
        }

        $node.closest('.comment-inline-form').remove();
        return false;
    };
694 694
    // Delete a comment via AJAX: optimistically hides it, restores it on
    // failure, and refreshes the comment/draft panels on success.
    this._deleteComment = function(node) {
        var $node = $(node);
        var $td = $node.closest('td');
        var $comment = $node.closest('.comment');
        var comment_id = $($comment).data('commentId');
        var isDraft = $($comment).data('commentDraft');

        var pullRequestId = templateContext.pull_request_data.pull_request_id;
        var commitId = templateContext.commit_data.commit_id;

        // NOTE(review): if neither pullRequestId nor commitId is set, `url`
        // stays undefined and the POST below targets the current page —
        // presumably that state cannot occur here; confirm.
        if (pullRequestId) {
            var url = pyroutes.url('pullrequest_comment_delete', {"comment_id": comment_id, "repo_name": templateContext.repo_name, "pull_request_id": pullRequestId})
        } else if (commitId) {
            var url = pyroutes.url('repo_commit_comment_delete', {"comment_id": comment_id, "repo_name": templateContext.repo_name, "commit_id": commitId})
        }

        var postData = {
            'csrf_token': CSRF_TOKEN
        };

        // optimistic UI: hide the comment while the request is in flight
        $comment.addClass('comment-deleting');
        $comment.hide('fast');

        var success = function(response) {
            $comment.remove();

            if (window.updateSticky !== undefined) {
                // potentially our comments change the active window size, so we
                // notify sticky elements
                updateSticky()
            }

            if (window.refreshAllComments !== undefined && !isDraft) {
                // if we have this handler, run it, and refresh all comments boxes
                refreshAllComments()
            }
            else if (window.refreshDraftComments !== undefined && isDraft) {
                // if we have this handler, run it, and refresh all comments boxes
                refreshDraftComments();
            }
            return false;
        };

        // roll the optimistic hide back and surface the error
        var failure = function(jqXHR, textStatus, errorThrown) {
            var prefix = "Error while deleting this comment.\n"
            var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
            ajaxErrorSwal(message);

            $comment.show('fast');
            $comment.removeClass('comment-deleting');
            return false;
        };
        ajaxPOST(url, postData, success, failure);

    }
750 750
751 751 this.deleteComment = function(node) {
752 752 var $comment = $(node).closest('.comment');
753 753 var comment_id = $comment.attr('data-comment-id');
754 754
755 755 SwalNoAnimation.fire({
756 756 title: 'Delete this comment?',
757 757 icon: 'warning',
758 758 showCancelButton: true,
759 759 confirmButtonText: _gettext('Yes, delete comment #{0}!').format(comment_id),
760 760
761 761 }).then(function(result) {
762 762 if (result.value) {
763 763 self._deleteComment(node);
764 764 }
765 765 })
766 766 };
767 767
    // Submit (finalize) a batch of draft comments for the current pull
    // request or commit; commentIds is a list of draft comment ids.
    this._finalizeDrafts = function(commentIds) {

        var pullRequestId = templateContext.pull_request_data.pull_request_id;
        var commitId = templateContext.commit_data.commit_id;

        // pick the submit endpoint based on the current context
        if (pullRequestId) {
            var url = pyroutes.url('pullrequest_draft_comments_submit', {"repo_name": templateContext.repo_name, "pull_request_id": pullRequestId})
        } else if (commitId) {
            var url = pyroutes.url('commit_draft_comments_submit', {"repo_name": templateContext.repo_name, "commit_id": commitId})
        }

        // remove the drafts so we can lock them before submit.
        $.each(commentIds, function(idx, val){
            $('#comment-{0}'.format(val)).remove();
        })

        var postData = {'comments': commentIds, 'csrf_token': CSRF_TOKEN};

        var submitSuccessCallback = function(json_data) {
            // re-attach the finalized comments returned by the server
            self.attachInlineComment(json_data);

            if (window.refreshDraftComments !== undefined) {
                // if we have this handler, run it, and refresh all comments boxes
                refreshDraftComments()
            }

            return false;
        };

        ajaxPOST(url, postData, submitSuccessCallback)

    }
800 800
801 801 this.finalizeDrafts = function(commentIds, callback) {
802 802
803 803 SwalNoAnimation.fire({
804 804 title: _ngettext('Submit {0} draft comment.', 'Submit {0} draft comments.', commentIds.length).format(commentIds.length),
805 805 icon: 'warning',
806 806 showCancelButton: true,
807 807 confirmButtonText: _gettext('Yes'),
808 808
809 809 }).then(function(result) {
810 810 if (result.value) {
811 811 if (callback !== undefined) {
812 812 callback(result)
813 813 }
814 814 self._finalizeDrafts(commentIds);
815 815 }
816 816 })
817 817 };
818 818
819 819 this.toggleWideMode = function (node) {
820 820
821 821 if ($('#content').hasClass('wrapper')) {
822 822 $('#content').removeClass("wrapper");
823 823 $('#content').addClass("wide-mode-wrapper");
824 824 $(node).addClass('btn-success');
825 825 return true
826 826 } else {
827 827 $('#content').removeClass("wide-mode-wrapper");
828 828 $('#content').addClass("wrapper");
829 829 $(node).removeClass('btn-success');
830 830 return false
831 831 }
832 832
833 833 };
834 834
835 835 /**
836 836 * Turn off/on all comments in file diff
837 837 */
838 838 this.toggleDiffComments = function(node) {
839 839 // Find closes filediff container
840 840 var $filediff = $(node).closest('.filediff');
841 841 if ($(node).hasClass('toggle-on')) {
842 842 var show = false;
843 843 } else if ($(node).hasClass('toggle-off')) {
844 844 var show = true;
845 845 }
846 846
847 847 // Toggle each individual comment block, so we can un-toggle single ones
848 848 $.each($filediff.find('.toggle-comment-action'), function(idx, val) {
849 849 self.toggleLineComments($(val), show)
850 850 })
851 851
852 852 // since we change the height of the diff container that has anchor points for upper
853 853 // sticky header, we need to tell it to re-calculate those
854 854 if (window.updateSticky !== undefined) {
855 855 // potentially our comments change the active window size, so we
856 856 // notify sticky elements
857 857 updateSticky()
858 858 }
859 859
860 860 return false;
861 861 }
862 862
863 863 this.toggleLineComments = function(node, show) {
864 864
865 865 var trElem = $(node).closest('tr')
866 866
867 867 if (show === true) {
868 868 // mark outdated comments as visible before the toggle;
869 869 $(trElem).find('.comment-outdated').show();
870 870 $(trElem).removeClass('hide-line-comments');
871 871 } else if (show === false) {
872 872 $(trElem).find('.comment-outdated').hide();
873 873 $(trElem).addClass('hide-line-comments');
874 874 } else {
875 875 // mark outdated comments as visible before the toggle;
876 876 $(trElem).find('.comment-outdated').show();
877 877 $(trElem).toggleClass('hide-line-comments');
878 878 }
879 879
880 880 // since we change the height of the diff container that has anchor points for upper
881 881 // sticky header, we need to tell it to re-calculate those
882 882 if (window.updateSticky !== undefined) {
883 883 // potentially our comments change the active window size, so we
884 884 // notify sticky elements
885 885 updateSticky()
886 886 }
887 887
888 888 };
889 889
    // Build a CommentForm for the given form element, wire up its
    // CodeMirror placeholder/focus, the resolve-TODO scroll-down, and the
    // Dropzone attachment uploader. Returns the CommentForm instance.
    this.createCommentForm = function(formElement, lineno, placeholderText, initAutocompleteActions, resolvesCommentId, edit, comment_id){
        var pullRequestId = templateContext.pull_request_data.pull_request_id;
        var commitId = templateContext.commit_data.commit_id;

        var commentForm = new CommentForm(
            formElement, commitId, pullRequestId, lineno, initAutocompleteActions, resolvesCommentId, edit, comment_id);
        var cm = commentForm.getCmInstance();

        // resolving a TODO overrides whatever placeholder was passed in
        if (resolvesCommentId){
            placeholderText = _gettext('Leave a resolution comment, or click resolve button to resolve TODO comment #{0}').format(resolvesCommentId);
        }

        // deferred so the editor is attached to the DOM first
        setTimeout(function() {
            // callbacks
            if (cm !== undefined) {
                commentForm.setPlaceholder(placeholderText);
                if (commentForm.isInline()) {
                    cm.focus();
                    cm.refresh();
                }
            }
        }, 10);

        // trigger scrolldown to the resolve comment, since it might be away
        // from the clicked
        if (resolvesCommentId){
            var actionNode = $(commentForm.resolvesActionId).offset();

            setTimeout(function() {
                if (actionNode) {
                    $('body, html').animate({scrollTop: actionNode.top}, 10);
                }
            }, 100);
        }

        // add dropzone support
        // builds renderer-appropriate markup (rst/markdown/plain) for an
        // uploaded attachment and appends it to the editor
        var insertAttachmentText = function (cm, attachmentName, attachmentStoreUrl, isRendered) {
            var renderer = templateContext.visual.default_renderer;
            if (renderer == 'rst') {
                var attachmentUrl = '`#{0} <{1}>`_'.format(attachmentName, attachmentStoreUrl);
                if (isRendered){
                    attachmentUrl = '\n.. image:: {0}'.format(attachmentStoreUrl);
                }
            } else if (renderer == 'markdown') {
                var attachmentUrl = '[{0}]({1})'.format(attachmentName, attachmentStoreUrl);
                if (isRendered){
                    attachmentUrl = '!' + attachmentUrl;
                }
            } else {
                var attachmentUrl = '{}'.format(attachmentStoreUrl);
            }
            cm.replaceRange(attachmentUrl+'\n', CodeMirror.Pos(cm.lastLine()));

            return false;
        };

        //see: https://www.dropzonejs.com/#configuration
        var storeUrl = pyroutes.url('repo_commit_comment_attachment_upload',
            {'repo_name': templateContext.repo_name,
             'commit_id': templateContext.commit_data.commit_id})

        // only set up the uploader when the template markup is present
        var previewTmpl = $(formElement).find('.comment-attachment-uploader-template').get(0);
        if (previewTmpl !== undefined){
            var selectLink = $(formElement).find('.pick-attachment').get(0);
            $(formElement).find('.comment-attachment-uploader').dropzone({
                url: storeUrl,
                headers: {"X-CSRF-Token": CSRF_TOKEN},
                paramName: function () {
                    return "attachment"
                }, // The name that will be used to transfer the file
                clickable: selectLink,
                parallelUploads: 1,
                maxFiles: 10,
                maxFilesize: templateContext.attachment_store.max_file_size_mb,
                uploadMultiple: false,
                autoProcessQueue: true, // if false queue will not be processed automatically.
                createImageThumbnails: false,
                previewTemplate: previewTmpl.innerHTML,

                accept: function (file, done) {
                    done();
                },
                init: function () {

                    // show the upload indicator while sending
                    this.on("sending", function (file, xhr, formData) {
                        $(formElement).find('.comment-attachment-uploader').find('.dropzone-text').hide();
                        $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').show();
                    });

                    // on success: restore the uploader UI and insert the
                    // attachment markup into the editor
                    this.on("success", function (file, response) {
                        $(formElement).find('.comment-attachment-uploader').find('.dropzone-text').show();
                        $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').hide();

                        var isRendered = false;
                        var ext = file.name.split('.').pop();
                        var imageExts = templateContext.attachment_store.image_ext;
                        if (imageExts.indexOf(ext) !== -1){
                            isRendered = true;
                        }

                        insertAttachmentText(cm, file.name, response.repo_fqn_access_path, isRendered)
                    });

                    // on error: show the best available error message in the
                    // dropzone preview element
                    this.on("error", function (file, errorMessage, xhr) {
                        $(formElement).find('.comment-attachment-uploader').find('.dropzone-upload').hide();

                        var error = null;

                        if (xhr !== undefined){
                            var httpStatus = xhr.status + " " + xhr.statusText;
                            if (xhr !== undefined && xhr.status >= 500) {
                                error = httpStatus;
                            }
                        }

                        if (error === null) {
                            error = errorMessage.error || errorMessage || httpStatus;
                        }
                        $(file.previewElement).find('.dz-error-message').html('ERROR: {0}'.format(error));

                    });
                }
            });
        }
        return commentForm;
    };
1016 1016
1017 1017 this.createGeneralComment = function (lineNo, placeholderText, resolvesCommentId) {
1018 1018
1019 1019 var tmpl = $('#cb-comment-general-form-template').html();
1020 1020 tmpl = tmpl.format(null, 'general');
1021 1021 var $form = $(tmpl);
1022 1022
1023 1023 var $formPlaceholder = $('#cb-comment-general-form-placeholder');
1024 1024 var curForm = $formPlaceholder.find('form');
1025 1025 if (curForm){
1026 1026 curForm.remove();
1027 1027 }
1028 1028 $formPlaceholder.append($form);
1029 1029
1030 1030 var _form = $($form[0]);
1031 1031 var autocompleteActions = ['approve', 'reject', 'as_note', 'as_todo'];
1032 1032 var edit = false;
1033 1033 var comment_id = null;
1034 1034 var commentForm = this.createCommentForm(
1035 1035 _form, lineNo, placeholderText, autocompleteActions, resolvesCommentId, edit, comment_id);
1036 1036 commentForm.initStatusChangeSelector();
1037 1037
1038 1038 return commentForm;
1039 1039 };
1040 1040
1041 1041 this.editComment = function(node, line_no, f_path) {
1042 1042 self.edit = true;
1043 1043 var $node = $(node);
1044 1044 var $td = $node.closest('td');
1045 1045
1046 1046 var $comment = $(node).closest('.comment');
1047 1047 var comment_id = $($comment).data('commentId');
1048 1048 var isDraft = $($comment).data('commentDraft');
1049 1049 var $editForm = null
1050 1050
1051 1051 var $comments = $node.closest('div.inline-comments');
1052 1052 var $general_comments = null;
1053 1053
1054 1054 if($comments.length){
1055 1055 // inline comments setup
1056 1056 $editForm = $comments.find('.comment-inline-form');
1057 1057 line_no = self.getLineNumber(node)
1058 1058 }
1059 1059 else{
1060 1060 // general comments setup
1061 1061 $comments = $('#comments');
1062 1062 $editForm = $comments.find('.comment-inline-form');
1063 1063 line_no = $comment[0].id
1064 1064 $('#cb-comment-general-form-placeholder').hide();
1065 1065 }
1066 1066
1067 1067 if ($editForm.length === 0) {
1068 1068
1069 1069 // unhide all comments if they are hidden for a proper REPLY mode
1070 1070 var $filediff = $node.closest('.filediff');
1071 1071 $filediff.removeClass('hide-comments');
1072 1072
1073 1073 $editForm = self.createNewFormWrapper(f_path, line_no);
1074 1074 if(f_path && line_no) {
1075 1075 $editForm.addClass('comment-inline-form-edit')
1076 1076 }
1077 1077
1078 1078 $comment.after($editForm)
1079 1079
1080 1080 var _form = $($editForm[0]).find('form');
1081 1081 var autocompleteActions = ['as_note',];
1082 1082 var commentForm = this.createCommentForm(
1083 1083 _form, line_no, '', autocompleteActions, resolvesCommentId,
1084 1084 this.edit, comment_id);
1085 1085 var old_comment_text_binary = $comment.attr('data-comment-text');
1086 1086 var old_comment_text = b64DecodeUnicode(old_comment_text_binary);
1087 1087 commentForm.cm.setValue(old_comment_text);
1088 1088 $comment.hide();
1089 1089 tooltipActivate();
1090 1090
1091 1091 // set a CUSTOM submit handler for inline comment edit action.
1092 1092 commentForm.setHandleFormSubmit(function(o) {
1093 1093 var text = commentForm.cm.getValue();
1094 1094 var commentType = commentForm.getCommentType();
1095 1095
1096 1096 if (text === "") {
1097 1097 return;
1098 1098 }
1099 1099
1100 1100 if (old_comment_text == text) {
1101 1101 SwalNoAnimation.fire({
1102 1102 title: 'Unable to edit comment',
1103 1103 html: _gettext('Comment body was not changed.'),
1104 1104 });
1105 1105 return;
1106 1106 }
1107 1107 var excludeCancelBtn = false;
1108 1108 var submitEvent = true;
1109 1109 commentForm.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent);
1110 1110 commentForm.cm.setOption("readOnly", true);
1111 1111
1112 1112 // Read last version known
1113 1113 var versionSelector = $('#comment_versions_{0}'.format(comment_id));
1114 1114 var version = versionSelector.data('lastVersion');
1115 1115
1116 1116 if (!version) {
1117 1117 version = 0;
1118 1118 }
1119 1119
1120 1120 var postData = {
1121 1121 'text': text,
1122 1122 'f_path': f_path,
1123 1123 'line': line_no,
1124 1124 'comment_type': commentType,
1125 1125 'draft': isDraft,
1126 1126 'version': version,
1127 1127 'csrf_token': CSRF_TOKEN
1128 1128 };
1129 1129
1130 1130 var submitSuccessCallback = function(json_data) {
1131 1131 $editForm.remove();
1132 1132 $comment.show();
1133 1133 var postData = {
1134 1134 'text': text,
1135 1135 'renderer': $comment.attr('data-comment-renderer'),
1136 1136 'csrf_token': CSRF_TOKEN
1137 1137 };
1138 1138
1139 1139 /* Inject new edited version selector */
1140 1140 var updateCommentVersionDropDown = function () {
1141 1141 var versionSelectId = '#comment_versions_'+comment_id;
1142 1142 var preLoadVersionData = [
1143 1143 {
1144 1144 id: json_data['comment_version'],
1145 1145 text: "v{0}".format(json_data['comment_version']),
1146 1146 action: function () {
1147 1147 Rhodecode.comments.showVersion(
1148 1148 json_data['comment_id'],
1149 1149 json_data['comment_history_id']
1150 1150 )
1151 1151 },
1152 1152 comment_version: json_data['comment_version'],
1153 1153 comment_author_username: json_data['comment_author_username'],
1154 1154 comment_author_gravatar: json_data['comment_author_gravatar'],
1155 1155 comment_created_on: json_data['comment_created_on'],
1156 1156 },
1157 1157 ]
1158 1158
1159 1159
1160 1160 if ($(versionSelectId).data('select2')) {
1161 1161 var oldData = $(versionSelectId).data('select2').opts.data.results;
1162 1162 $(versionSelectId).select2("destroy");
1163 1163 preLoadVersionData = oldData.concat(preLoadVersionData)
1164 1164 }
1165 1165
1166 1166 initVersionSelector(versionSelectId, {results: preLoadVersionData});
1167 1167
1168 1168 $comment.attr('data-comment-text', utf8ToB64(text));
1169 1169
1170 1170 var versionSelector = $('#comment_versions_'+comment_id);
1171 1171
1172 1172 // set lastVersion so we know our last edit version
1173 1173 versionSelector.data('lastVersion', json_data['comment_version'])
1174 1174 versionSelector.parent().show();
1175 1175 }
1176 1176 updateCommentVersionDropDown();
1177 1177
1178 1178 // by default we reset state of comment preserving the text
1179 1179 var failRenderCommit = function(jqXHR, textStatus, errorThrown) {
1180 1180 var prefix = "Error while editing this comment.\n"
1181 1181 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
1182 1182 ajaxErrorSwal(message);
1183 1183 };
1184 1184
1185 1185 var successRenderCommit = function(o){
1186 1186 $comment.show();
1187 1187 $comment[0].lastElementChild.innerHTML = o;
1188 1188 };
1189 1189
1190 1190 var previewUrl = pyroutes.url(
1191 1191 'repo_commit_comment_preview',
1192 1192 {'repo_name': templateContext.repo_name,
1193 1193 'commit_id': templateContext.commit_data.commit_id});
1194 1194
1195 1195 _submitAjaxPOST(
1196 1196 previewUrl, postData, successRenderCommit, failRenderCommit
1197 1197 );
1198 1198
1199 1199 try {
1200 1200 var html = json_data.rendered_text;
1201 1201 var lineno = json_data.line_no;
1202 1202 var target_id = json_data.target_id;
1203 1203
1204 1204 $comments.find('.cb-comment-add-button').before(html);
1205 1205
1206 1206 // run global callback on submit
1207 1207 commentForm.globalSubmitSuccessCallback({draft: isDraft, comment_id: comment_id});
1208 1208
1209 1209 } catch (e) {
1210 1210 console.error(e);
1211 1211 }
1212 1212
1213 1213 // re trigger the linkification of next/prev navigation
1214 1214 linkifyComments($('.inline-comment-injected'));
1215 1215 timeagoActivate();
1216 1216 tooltipActivate();
1217 1217
1218 1218 if (window.updateSticky !== undefined) {
1219 1219 // potentially our comments change the active window size, so we
1220 1220 // notify sticky elements
1221 1221 updateSticky()
1222 1222 }
1223 1223
1224 1224 if (window.refreshAllComments !== undefined && !isDraft) {
1225 1225 // if we have this handler, run it, and refresh all comments boxes
1226 1226 refreshAllComments()
1227 1227 }
1228 1228 else if (window.refreshDraftComments !== undefined && isDraft) {
1229 1229 // if we have this handler, run it, and refresh all comments boxes
1230 1230 refreshDraftComments();
1231 1231 }
1232 1232
1233 1233 commentForm.setActionButtonsDisabled(false);
1234 1234
1235 1235 };
1236 1236
1237 1237 var submitFailCallback = function(jqXHR, textStatus, errorThrown) {
1238 1238 var prefix = "Error while editing comment.\n"
1239 1239 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
1240 1240 if (jqXHR.status == 409){
1241 1241 message = 'This comment was probably changed somewhere else. Please reload the content of this comment.'
1242 1242 ajaxErrorSwal(message, 'Comment version mismatch.');
1243 1243 } else {
1244 1244 ajaxErrorSwal(message);
1245 1245 }
1246 1246
1247 1247 commentForm.resetCommentFormState(text)
1248 1248 };
1249 1249 commentForm.submitAjaxPOST(
1250 1250 commentForm.submitUrl, postData,
1251 1251 submitSuccessCallback,
1252 1252 submitFailCallback);
1253 1253 });
1254 1254 }
1255 1255
1256 1256 $editForm.addClass('comment-inline-form-open');
1257 1257 };
1258 1258
1259 1259 this.attachComment = function(json_data) {
1260 1260 var self = this;
1261 1261 $.each(json_data, function(idx, val) {
1262 1262 var json_data_elem = [val]
1263 1263 var isInline = val.comment_f_path && val.comment_lineno
1264 1264
1265 1265 if (isInline) {
1266 1266 self.attachInlineComment(json_data_elem)
1267 1267 } else {
1268 1268 self.attachGeneralComment(json_data_elem)
1269 1269 }
1270 1270 })
1271 1271
1272 1272 }
1273 1273
1274 1274 this.attachGeneralComment = function(json_data) {
1275 1275 $.each(json_data, function(idx, val) {
1276 1276 $('#injected_page_comments').append(val.rendered_text);
1277 1277 })
1278 1278 }
1279 1279
1280 1280 this.attachInlineComment = function(json_data) {
1281 1281
1282 1282 $.each(json_data, function (idx, val) {
1283 1283 var line_qry = '*[data-line-no="{0}"]'.format(val.line_no);
1284 1284 var html = val.rendered_text;
1285 1285 var $inlineComments = $('#' + val.target_id)
1286 1286 .find(line_qry)
1287 1287 .find('.inline-comments');
1288 1288
1289 1289 var lastComment = $inlineComments.find('.comment-inline').last();
1290 1290
1291 1291 if (lastComment.length === 0) {
1292 1292 // first comment, we append simply
1293 1293 $inlineComments.find('.reply-thread-container-wrapper').before(html);
1294 1294 } else {
1295 1295 $(lastComment).after(html)
1296 1296 }
1297 1297
1298 1298 })
1299 1299
1300 1300 };
1301 1301
1302 1302 this.createNewFormWrapper = function(f_path, line_no) {
1303 1303 // create a new reply HTML form from template
1304 1304 var tmpl = $('#cb-comment-inline-form-template').html();
1305 1305 tmpl = tmpl.format(escapeHtml(f_path), line_no);
1306 1306 return $(tmpl);
1307 1307 }
1308 1308
1309 1309 this.markCommentResolved = function(commentId) {
1310 1310 $('#comment-label-{0}'.format(commentId)).find('.resolved').show();
1311 1311 $('#comment-label-{0}'.format(commentId)).find('.resolve').hide();
1312 1312 };
1313 1313
1314 1314 this.createComment = function(node, f_path, line_no, resolutionComment) {
1315 1315 self.edit = false;
1316 1316 var $node = $(node);
1317 1317 var $td = $node.closest('td');
1318 1318 var resolvesCommentId = resolutionComment || null;
1319 1319
1320 1320 var $replyForm = $td.find('.comment-inline-form');
1321 1321
1322 1322 // if form isn't existing, we're generating a new one and injecting it.
1323 1323 if ($replyForm.length === 0) {
1324 1324
1325 1325 // unhide/expand all comments if they are hidden for a proper REPLY mode
1326 1326 self.toggleLineComments($node, true);
1327 1327
1328 1328 $replyForm = self.createNewFormWrapper(f_path, line_no);
1329 1329
1330 1330 var $comments = $td.find('.inline-comments');
1331 1331
1332 1332 // There aren't any comments, we init the `.inline-comments` with `reply-thread-container` first
1333 1333 if ($comments.length===0) {
1334 var replBtn = '<button class="cb-comment-add-button" onclick="return Rhodecode.comments.createComment(this, \'{0}\', \'{1}\', null)">Reply...</button>'.format(f_path, line_no)
1334 var replBtn = '<button class="cb-comment-add-button" onclick="return Rhodecode.comments.createComment(this, \'{0}\', \'{1}\', null)">Reply...</button>'.format(escapeHtml(f_path), line_no)
1335 1335 var $reply_container = $('#cb-comments-inline-container-template')
1336 1336 $reply_container.find('button.cb-comment-add-button').replaceWith(replBtn);
1337 1337 $td.append($($reply_container).html());
1338 1338 }
1339 1339
1340 1340 // default comment button exists, so we prepend the form for leaving initial comment
1341 1341 $td.find('.cb-comment-add-button').before($replyForm);
1342 1342 // set marker, that we have a open form
1343 1343 var $replyWrapper = $td.find('.reply-thread-container-wrapper')
1344 1344 $replyWrapper.addClass('comment-form-active');
1345 1345
1346 1346 var lastComment = $comments.find('.comment-inline').last();
1347 1347 if ($(lastComment).hasClass('comment-outdated')) {
1348 1348 $replyWrapper.show();
1349 1349 }
1350 1350
1351 1351 var _form = $($replyForm[0]).find('form');
1352 1352 var autocompleteActions = ['as_note', 'as_todo'];
1353 1353 var comment_id=null;
1354 1354 var placeholderText = _gettext('Leave a comment on file {0} line {1}.').format(f_path, line_no);
1355 1355 var commentForm = self.createCommentForm(
1356 1356 _form, line_no, placeholderText, autocompleteActions, resolvesCommentId,
1357 1357 self.edit, comment_id);
1358 1358
1359 1359 // set a CUSTOM submit handler for inline comments.
1360 1360 commentForm.setHandleFormSubmit(function(o) {
1361 1361 var text = commentForm.cm.getValue();
1362 1362 var commentType = commentForm.getCommentType();
1363 1363 var resolvesCommentId = commentForm.getResolvesId();
1364 1364 var isDraft = commentForm.getDraftState();
1365 1365
1366 1366 if (text === "") {
1367 1367 return;
1368 1368 }
1369 1369
1370 1370 if (line_no === undefined) {
1371 1371 alert('Error: unable to fetch line number for this inline comment !');
1372 1372 return;
1373 1373 }
1374 1374
1375 1375 if (f_path === undefined) {
1376 1376 alert('Error: unable to fetch file path for this inline comment !');
1377 1377 return;
1378 1378 }
1379 1379
1380 1380 var excludeCancelBtn = false;
1381 1381 var submitEvent = true;
1382 1382 commentForm.setActionButtonsDisabled(true, excludeCancelBtn, submitEvent);
1383 1383 commentForm.cm.setOption("readOnly", true);
1384 1384 var postData = {
1385 1385 'text': text,
1386 1386 'f_path': f_path,
1387 1387 'line': line_no,
1388 1388 'comment_type': commentType,
1389 1389 'draft': isDraft,
1390 1390 'csrf_token': CSRF_TOKEN
1391 1391 };
1392 1392 if (resolvesCommentId){
1393 1393 postData['resolves_comment_id'] = resolvesCommentId;
1394 1394 }
1395 1395
1396 1396 // submitSuccess for inline commits
1397 1397 var submitSuccessCallback = function(json_data) {
1398 1398
1399 1399 $replyForm.remove();
1400 1400 $td.find('.reply-thread-container-wrapper').removeClass('comment-form-active');
1401 1401
1402 1402 try {
1403 1403
1404 1404 // inject newly created comments, json_data is {<comment_id>: {}}
1405 1405 self.attachInlineComment(json_data)
1406 1406
1407 1407 //mark visually which comment was resolved
1408 1408 if (resolvesCommentId) {
1409 1409 self.markCommentResolved(resolvesCommentId);
1410 1410 }
1411 1411
1412 1412 // run global callback on submit
1413 1413 commentForm.globalSubmitSuccessCallback({
1414 1414 draft: isDraft,
1415 1415 comment_id: comment_id
1416 1416 });
1417 1417
1418 1418 } catch (e) {
1419 1419 console.error(e);
1420 1420 }
1421 1421
1422 1422 if (window.updateSticky !== undefined) {
1423 1423 // potentially our comments change the active window size, so we
1424 1424 // notify sticky elements
1425 1425 updateSticky()
1426 1426 }
1427 1427
1428 1428 if (window.refreshAllComments !== undefined && !isDraft) {
1429 1429 // if we have this handler, run it, and refresh all comments boxes
1430 1430 refreshAllComments()
1431 1431 }
1432 1432 else if (window.refreshDraftComments !== undefined && isDraft) {
1433 1433 // if we have this handler, run it, and refresh all comments boxes
1434 1434 refreshDraftComments();
1435 1435 }
1436 1436
1437 1437 commentForm.setActionButtonsDisabled(false);
1438 1438
1439 1439 // re trigger the linkification of next/prev navigation
1440 1440 linkifyComments($('.inline-comment-injected'));
1441 1441 timeagoActivate();
1442 1442 tooltipActivate();
1443 1443 };
1444 1444
1445 1445 var submitFailCallback = function(jqXHR, textStatus, errorThrown) {
1446 1446 var prefix = "Error while submitting comment.\n"
1447 1447 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
1448 1448 ajaxErrorSwal(message);
1449 1449 commentForm.resetCommentFormState(text)
1450 1450 };
1451 1451
1452 1452 commentForm.submitAjaxPOST(
1453 1453 commentForm.submitUrl, postData, submitSuccessCallback, submitFailCallback);
1454 1454 });
1455 1455 }
1456 1456
1457 1457 // Finally "open" our reply form, since we know there are comments and we have the "attached" old form
1458 1458 $replyForm.addClass('comment-inline-form-open');
1459 1459 tooltipActivate();
1460 1460 };
1461 1461
1462 1462 this.createResolutionComment = function(commentId){
1463 1463 // hide the trigger text
1464 1464 $('#resolve-comment-{0}'.format(commentId)).hide();
1465 1465
1466 1466 var comment = $('#comment-'+commentId);
1467 1467 var commentData = comment.data();
1468 1468
1469 1469 if (commentData.commentInline) {
1470 1470 var f_path = commentData.commentFPath;
1471 1471 var line_no = commentData.commentLineNo;
1472 1472 this.createComment(comment, f_path, line_no, commentId)
1473 1473 } else {
1474 1474 this.createGeneralComment('general', "$placeholder", commentId)
1475 1475 }
1476 1476
1477 1477 return false;
1478 1478 };
1479 1479
1480 1480 this.submitResolution = function(commentId){
1481 1481 var form = $('#resolve_comment_{0}'.format(commentId)).closest('form');
1482 1482 var commentForm = form.get(0).CommentForm;
1483 1483
1484 1484 var cm = commentForm.getCmInstance();
1485 1485 var renderer = templateContext.visual.default_renderer;
1486 1486 if (renderer == 'rst'){
1487 1487 var commentUrl = '`#{0} <{1}#comment-{0}>`_'.format(commentId, commentForm.selfUrl);
1488 1488 } else if (renderer == 'markdown') {
1489 1489 var commentUrl = '[#{0}]({1}#comment-{0})'.format(commentId, commentForm.selfUrl);
1490 1490 } else {
1491 1491 var commentUrl = '{1}#comment-{0}'.format(commentId, commentForm.selfUrl);
1492 1492 }
1493 1493
1494 1494 cm.setValue(_gettext('TODO from comment {0} was fixed.').format(commentUrl));
1495 1495 form.submit();
1496 1496 return false;
1497 1497 };
1498 1498
1499 1499 this.resolveTodo = function (elem, todoId) {
1500 1500 var commentId = todoId;
1501 1501
1502 1502 SwalNoAnimation.fire({
1503 1503 title: 'Resolve TODO {0}'.format(todoId),
1504 1504 showCancelButton: true,
1505 1505 confirmButtonText: _gettext('Yes'),
1506 1506 showLoaderOnConfirm: true,
1507 1507
1508 1508 allowOutsideClick: function () {
1509 1509 !Swal.isLoading()
1510 1510 },
1511 1511 preConfirm: function () {
1512 1512 var comment = $('#comment-' + commentId);
1513 1513 var commentData = comment.data();
1514 1514
1515 1515 var f_path = null
1516 1516 var line_no = null
1517 1517 if (commentData.commentInline) {
1518 1518 f_path = commentData.commentFPath;
1519 1519 line_no = commentData.commentLineNo;
1520 1520 }
1521 1521
1522 1522 var renderer = templateContext.visual.default_renderer;
1523 1523 var commentBoxUrl = '{1}#comment-{0}'.format(commentId);
1524 1524
1525 1525 // Pull request case
1526 1526 if (templateContext.pull_request_data.pull_request_id !== null) {
1527 1527 var commentUrl = pyroutes.url('pullrequest_comment_create',
1528 1528 {
1529 1529 'repo_name': templateContext.repo_name,
1530 1530 'pull_request_id': templateContext.pull_request_data.pull_request_id,
1531 1531 'comment_id': commentId
1532 1532 });
1533 1533 } else {
1534 1534 var commentUrl = pyroutes.url('repo_commit_comment_create',
1535 1535 {
1536 1536 'repo_name': templateContext.repo_name,
1537 1537 'commit_id': templateContext.commit_data.commit_id,
1538 1538 'comment_id': commentId
1539 1539 });
1540 1540 }
1541 1541
1542 1542 if (renderer === 'rst') {
1543 1543 commentBoxUrl = '`#{0} <{1}#comment-{0}>`_'.format(commentId, commentUrl);
1544 1544 } else if (renderer === 'markdown') {
1545 1545 commentBoxUrl = '[#{0}]({1}#comment-{0})'.format(commentId, commentUrl);
1546 1546 }
1547 1547 var resolveText = _gettext('TODO from comment {0} was fixed.').format(commentBoxUrl);
1548 1548
1549 1549 var postData = {
1550 1550 text: resolveText,
1551 1551 comment_type: 'note',
1552 1552 draft: false,
1553 1553 csrf_token: CSRF_TOKEN,
1554 1554 resolves_comment_id: commentId
1555 1555 }
1556 1556 if (commentData.commentInline) {
1557 1557 postData['f_path'] = f_path;
1558 1558 postData['line'] = line_no;
1559 1559 }
1560 1560
1561 1561 return new Promise(function (resolve, reject) {
1562 1562 $.ajax({
1563 1563 type: 'POST',
1564 1564 data: postData,
1565 1565 url: commentUrl,
1566 1566 headers: {'X-PARTIAL-XHR': true}
1567 1567 })
1568 1568 .done(function (data) {
1569 1569 resolve(data);
1570 1570 })
1571 1571 .fail(function (jqXHR, textStatus, errorThrown) {
1572 1572 var prefix = "Error while resolving TODO.\n"
1573 1573 var message = formatErrorMessage(jqXHR, textStatus, errorThrown, prefix);
1574 1574 ajaxErrorSwal(message);
1575 1575 });
1576 1576 })
1577 1577 }
1578 1578
1579 1579 })
1580 1580 .then(function (result) {
1581 1581 var success = function (json_data) {
1582 1582 resolvesCommentId = commentId;
1583 1583 var commentResolved = json_data[Object.keys(json_data)[0]]
1584 1584
1585 1585 try {
1586 1586
1587 1587 if (commentResolved.f_path) {
1588 1588 // inject newly created comments, json_data is {<comment_id>: {}}
1589 1589 self.attachInlineComment(json_data)
1590 1590 } else {
1591 1591 self.attachGeneralComment(json_data)
1592 1592 }
1593 1593
1594 1594 //mark visually which comment was resolved
1595 1595 if (resolvesCommentId) {
1596 1596 self.markCommentResolved(resolvesCommentId);
1597 1597 }
1598 1598
1599 1599 // run global callback on submit
1600 1600 if (window.commentFormGlobalSubmitSuccessCallback !== undefined) {
1601 1601 commentFormGlobalSubmitSuccessCallback({
1602 1602 draft: false,
1603 1603 comment_id: commentId
1604 1604 });
1605 1605 }
1606 1606
1607 1607 } catch (e) {
1608 1608 console.error(e);
1609 1609 }
1610 1610
1611 1611 if (window.updateSticky !== undefined) {
1612 1612 // potentially our comments change the active window size, so we
1613 1613 // notify sticky elements
1614 1614 updateSticky()
1615 1615 }
1616 1616
1617 1617 if (window.refreshAllComments !== undefined) {
1618 1618 // if we have this handler, run it, and refresh all comments boxes
1619 1619 refreshAllComments()
1620 1620 }
1621 1621 // re trigger the linkification of next/prev navigation
1622 1622 linkifyComments($('.inline-comment-injected'));
1623 1623 timeagoActivate();
1624 1624 tooltipActivate();
1625 1625 };
1626 1626
1627 1627 if (result.value) {
1628 1628 $(elem).remove();
1629 1629 success(result.value)
1630 1630 }
1631 1631 })
1632 1632 };
1633 1633
1634 1634 };
1635 1635
window.commentHelp = function(renderer) {
    // Render the comment-help hovercard template for the given renderer.
    return renderTemplate('commentHelpHovercard', {'renderer': renderer});
}
@@ -1,1262 +1,1263 b''
1 1 ## -*- coding: utf-8 -*-
2 2
3 3 <%!
4 4 from rhodecode.lib import html_filters
5 5 %>
6 6
7 7 <%inherit file="root.mako"/>
8 8
9 9 <%include file="/ejs_templates/templates.html"/>
10 10
11 11 <div class="outerwrapper">
12 12 <!-- HEADER -->
13 13 <div class="header">
14 14 <div id="header-inner" class="wrapper">
15 15 <div id="logo">
16 16 <div class="logo-wrapper">
17 17 <a href="${h.route_path('home')}"><img src="${h.asset('images/rhodecode-logo-white-60x60.png')}" alt="RhodeCode"/></a>
18 18 </div>
19 19 % if c.rhodecode_name:
20 20 <div class="branding">
21 21 <a href="${h.route_path('home')}">${h.branding(c.rhodecode_name)}</a>
22 22 </div>
23 23 % endif
24 24 </div>
25 25 <!-- MENU BAR NAV -->
26 26 ${self.menu_bar_nav()}
27 27 <!-- END MENU BAR NAV -->
28 28 </div>
29 29 </div>
30 30 ${self.menu_bar_subnav()}
31 31 <!-- END HEADER -->
32 32
33 33 <!-- CONTENT -->
34 34 <div id="content" class="wrapper">
35 35
36 36 <rhodecode-toast id="notifications"></rhodecode-toast>
37 37
38 38 <div class="main">
39 39 ${next.main()}
40 40 </div>
41 41
42 42 </div>
43 43 <!-- END CONTENT -->
44 44
45 45 </div>
46 46
47 47 <!-- FOOTER -->
48 48 <div id="footer">
49 49 <div id="footer-inner" class="title wrapper">
50 50 <div>
51 51 <% sid = 'block' if request.GET.get('showrcid') else 'none' %>
52 52
53 53 <p class="footer-link-right">
54 54 <a class="grey-link-action" href="${h.route_path('home', _query={'showrcid': 1})}">
55 55 RhodeCode
56 56 % if c.visual.show_version:
57 57 ${c.rhodecode_version}
58 58 % endif
59 59 ${c.rhodecode_edition}
60 60 </a> |
61 61
62 62 % if c.visual.rhodecode_support_url:
63 63 <a class="grey-link-action" href="${c.visual.rhodecode_support_url}" target="_blank">${_('Support')}</a> |
64 64 <a class="grey-link-action" href="https://docs.rhodecode.com" target="_blank">${_('Documentation')}</a>
65 65 % endif
66 66
67 67 </p>
68 68
69 69 <p class="server-instance" style="display:${sid}">
70 70 ## display hidden instance ID if specially defined
71 71 &copy; 2010-${h.datetime.today().year}, <a href="${h.route_url('rhodecode_official')}" target="_blank">RhodeCode GmbH</a>. All rights reserved.
72 72 % if c.rhodecode_instanceid:
73 73 ${_('RhodeCode instance id: {}').format(c.rhodecode_instanceid)}
74 74 % endif
75 75 </p>
76 76 </div>
77 77 </div>
78 78 </div>
79 79
80 80 <!-- END FOOTER -->
81 81
82 82 ### MAKO DEFS ###
83 83
## Sub-navigation hook: intentionally empty in the base template,
## overridden by pages that render a repository/group context bar.
<%def name="menu_bar_subnav()">
</%def>
86 86
## Breadcrumbs wrapper; the actual links come from the page-provided
## `breadcrumbs_links` def.
<%def name="breadcrumbs(class_='breadcrumbs')">
    <div class="${class_}">
        ${self.breadcrumbs_links()}
    </div>
</%def>
92 92
## Admin context bar: full menu for super-admins, reduced menu for
## delegated admins (driven by their per-object admin permissions).
<%def name="admin_menu(active=None)">

<div id="context-bar">
    <div class="wrapper">
        <div class="title">
            <div class="title-content">
                <div class="title-main">
                    % if c.is_super_admin:
                        ${_('Super-admin Panel')}
                    % else:
                        ${_('Delegated Admin Panel')}
                    % endif
                </div>
            </div>
        </div>

        <ul id="context-pages" class="navigation horizontal-list">

        ## super-admin case
        % if c.is_super_admin:
            <li class="${h.is_active('audit_logs', active)}"><a href="${h.route_path('admin_audit_logs')}">${_('Admin audit logs')}</a></li>
            <li class="${h.is_active('repositories', active)}"><a href="${h.route_path('repos')}">${_('Repositories')}</a></li>
            <li class="${h.is_active('repository_groups', active)}"><a href="${h.route_path('repo_groups')}">${_('Repository groups')}</a></li>
            <li class="${h.is_active('users', active)}"><a href="${h.route_path('users')}">${_('Users')}</a></li>
            <li class="${h.is_active('user_groups', active)}"><a href="${h.route_path('user_groups')}">${_('User groups')}</a></li>
            <li class="${h.is_active('artifacts', active)}"><a href="${h.route_path('admin_artifacts')}">${_('Artifacts')}</a></li>
            <li class="${h.is_active('permissions', active)}"><a href="${h.route_path('admin_permissions_application')}">${_('Permissions')}</a></li>
            <li class="${h.is_active('authentication', active)}"><a href="${h.route_path('auth_home', traverse='')}">${_('Authentication')}</a></li>
            <li class="${h.is_active('integrations', active)}"><a href="${h.route_path('global_integrations_home')}">${_('Integrations')}</a></li>
            <li class="${h.is_active('defaults', active)}"><a href="${h.route_path('admin_defaults_repositories')}">${_('Defaults')}</a></li>
            <li class="${h.is_active('settings', active)}"><a href="${h.route_path('admin_settings')}">${_('Settings')}</a></li>

        ## delegated admin
        % elif c.is_delegated_admin:
            ## entries are shown only when the user administers at least one
            ## object of the given kind, or may create one
            <%
            repositories=c.auth_user.repositories_admin or c.can_create_repo
            repository_groups=c.auth_user.repository_groups_admin or c.can_create_repo_group
            user_groups=c.auth_user.user_groups_admin or c.can_create_user_group
            %>

            %if repositories:
                <li class="${h.is_active('repositories', active)} local-admin-repos"><a href="${h.route_path('repos')}">${_('Repositories')}</a></li>
            %endif
            %if repository_groups:
                <li class="${h.is_active('repository_groups', active)} local-admin-repo-groups"><a href="${h.route_path('repo_groups')}">${_('Repository groups')}</a></li>
            %endif
            %if user_groups:
                <li class="${h.is_active('user_groups', active)} local-admin-user-groups"><a href="${h.route_path('user_groups')}">${_('User groups')}</a></li>
            %endif
        % endif
        </ul>

    </div>
    <div class="clear"></div>
</div>
</%def>
148 149
## Render a definition list from (dt, dd, title, show_items) tuples;
## `show_items` optionally adds a collapsible "Show More" detail section.
<%def name="dt_info_panel(elements)">
    <dl class="dl-horizontal">
    %for dt, dd, title, show_items in elements:
        <dt>${dt}:</dt>
        <dd title="${h.tooltip(title)}">
        %if callable(dd):
            ## allow lazy evaluation of elements
            ${dd()}
        %else:
            ${dd}
        %endif
        %if show_items:
            ## md5 of the term gives a stable toggle id for the details block
            <span class="btn-collapse" data-toggle="item-${h.md5_safe(dt)[:6]}-details">${_('Show More')} </span>
        %endif
        </dd>

        %if show_items:
        <div class="collapsable-content" data-toggle="item-${h.md5_safe(dt)[:6]}-details" style="display: none">
        %for item in show_items:
            <dt></dt>
            <dd>${item}</dd>
        %endfor
        </div>
        %endif

    %endfor
    </dl>
</%def>
177 178
## Render one table row from a (key, val, title, show_items) tuple;
## table-based sibling of dt_info_panel with the same collapsible details.
<%def name="tr_info_entry(element)">
    <% key, val, title, show_items = element %>

    <tr>
        <td style="vertical-align: top">${key}</td>
        <td title="${h.tooltip(title)}">
        %if callable(val):
            ## allow lazy evaluation of elements
            ${val()}
        %else:
            ${val}
        %endif
        %if show_items:
        ## md5 of the value gives a stable toggle id for the details block
        <div class="collapsable-content" data-toggle="item-${h.md5_safe(val)[:6]}-details" style="display: none">
            % for item in show_items:
            <dt></dt>
            <dd>${item}</dd>
            % endfor
        </div>
        %endif
        </td>
        <td style="vertical-align: top">
        %if show_items:
            <span class="btn-collapse" data-toggle="item-${h.md5_safe(val)[:6]}-details">${_('Show More')} </span>
        %endif
        </td>
    </tr>

</%def>
207 208
## Render a user's gravatar <img>, optionally wired to a hovercard tooltip.
<%def name="gravatar(email, size=16, tooltip=False, tooltip_alt=None, user=None, extra_class=None)">
    <%
    if size > 16:
        gravatar_class = ['gravatar','gravatar-large']
    else:
        gravatar_class = ['gravatar']

    data_hovercard_url = ''
    # escape angle brackets: the alt text may contain "Name <email>" contacts
    data_hovercard_alt = tooltip_alt.replace('<', '&lt;').replace('>', '&gt;') if tooltip_alt else ''

    if tooltip:
        gravatar_class += ['tooltip-hovercard']
    if extra_class:
        gravatar_class += extra_class
    if tooltip and user:
        if user.username == h.DEFAULT_USER:
            # the anonymous/default user gets no hovercard; drop the class again
            gravatar_class.pop(-1)
        else:
            data_hovercard_url = request.route_path('hovercard_user', user_id=getattr(user, 'user_id', ''))
    gravatar_class = ' '.join(gravatar_class)

    %>
    <%doc>
        TODO: johbo: For now we serve double size images to make it smooth
        for retina. This is how it worked until now. Should be replaced
        with a better solution at some point.
    </%doc>

    <img class="${gravatar_class}" height="${size}" width="${size}" data-hovercard-url="${data_hovercard_url}" data-hovercard-alt="${data_hovercard_alt}" src="${h.gravatar_url(email, size * 2)}" />
</%def>
238 239
239 240
## Render a gravatar plus a linked username for a raw contact string;
## falls back to the contact text when no RhodeCode user is discovered.
<%def name="gravatar_with_user(contact, size=16, show_disabled=False, tooltip=False, _class='rc-user')">
    <%
    email = h.email_or_none(contact)
    rc_user = h.discover_user(contact)
    %>

    <div class="${_class}">
        ${self.gravatar(email, size, tooltip=tooltip, tooltip_alt=contact, user=rc_user)}
        <span class="${('user user-disabled' if show_disabled else 'user')}">
            ${h.link_to_user(rc_user or contact)}
        </span>
    </div>
</%def>
253 254
254 255
## Render a user-group icon, optionally wired to a hovercard tooltip.
<%def name="user_group_icon(user_group=None, size=16, tooltip=False)">
    <%
    # NOTE(review): both branches assign the same class — looks like a
    # copy-paste from gravatar(); confirm whether a large variant was intended
    if (size > 16):
        gravatar_class = 'icon-user-group-alt'
    else:
        gravatar_class = 'icon-user-group-alt'

    if tooltip:
        gravatar_class += ' tooltip-hovercard'

    data_hovercard_url = request.route_path('hovercard_user_group', user_group_id=user_group.users_group_id)
    %>
    <%doc>
        TODO: johbo: For now we serve double size images to make it smooth
        for retina. This is how it worked until now. Should be replaced
        with a better solution at some point.
    </%doc>

    <i style="font-size: ${size}px" class="${gravatar_class} x-icon-size-${size}" data-hovercard-url="${data_hovercard_url}"></i>
</%def>
275 276
## Repository title block: VCS + visibility icons, breadcrumb name,
## RSS/watch actions, and fork/clone/locking status lines.
<%def name="repo_page_title(repo_instance)">
<div class="title-content repo-title">

    <div class="title-main">
        ## SVN/HG/GIT icons
        %if h.is_hg(repo_instance):
            <i class="icon-hg"></i>
        %endif
        %if h.is_git(repo_instance):
            <i class="icon-git"></i>
        %endif
        %if h.is_svn(repo_instance):
            <i class="icon-svn"></i>
        %endif

        ## public/private
        %if repo_instance.private:
            <i class="icon-repo-private"></i>
        %else:
            <i class="icon-repo-public"></i>
        %endif

        ## repo name with group name
        ${h.breadcrumb_repo_link(repo_instance)}

        ## Context Actions
        <div class="pull-right">
            ## logged-in users get an authenticated RSS link and a watch toggle;
            ## anonymous users only get the plain RSS link
            %if c.rhodecode_user.username != h.DEFAULT_USER:
                <a href="${h.route_path('atom_feed_home', repo_name=c.rhodecode_db_repo.repo_uid, _query=dict(auth_token=c.rhodecode_user.feed_token))}" title="${_('RSS Feed')}" class="btn btn-sm"><i class="icon-rss-sign"></i>RSS</a>

                <a href="#WatchRepo" onclick="toggleFollowingRepo(this, templateContext.repo_id); return false" title="${_('Watch this Repository and actions on it in your personalized journal')}" class="btn btn-sm ${('watching' if c.repository_is_user_following else '')}">
                % if c.repository_is_user_following:
                    <i class="icon-eye-off"></i>${_('Unwatch')}
                % else:
                    <i class="icon-eye"></i>${_('Watch')}
                % endif

                </a>
            %else:
                <a href="${h.route_path('atom_feed_home', repo_name=c.rhodecode_db_repo.repo_uid)}" title="${_('RSS Feed')}" class="btn btn-sm"><i class="icon-rss-sign"></i>RSS</a>
            %endif
        </div>

    </div>

    ## FORKED
    %if repo_instance.fork:
        <p class="discreet">
            <i class="icon-code-fork"></i> ${_('Fork of')}
            ${h.link_to_if(c.has_origin_repo_read_perm,repo_instance.fork.repo_name, h.route_path('repo_summary', repo_name=repo_instance.fork.repo_name))}
        </p>
    %endif

    ## IMPORTED FROM REMOTE
    %if repo_instance.clone_uri:
        <p class="discreet">
            <i class="icon-code-fork"></i> ${_('Clone from')}
            ## credentials are stripped from the displayed/linked clone URI
            <a href="${h.safe_str(h.hide_credentials(repo_instance.clone_uri))}">${h.hide_credentials(repo_instance.clone_uri)}</a>
        </p>
    %endif

    ## LOCKING STATUS
    %if repo_instance.locked[0]:
        <p class="locking_locked discreet">
            <i class="icon-repo-lock"></i>
            ${_('Repository locked by %(user)s') % {'user': h.person_by_id(repo_instance.locked[0])}}
        </p>
    %elif repo_instance.enable_locking:
        <p class="locking_unlocked discreet">
            <i class="icon-repo-unlock"></i>
            ${_('Repository not locked. Pull repository to lock it.')}
        </p>
    %endif

</div>
</%def>
352 353
353 354 <%def name="repo_menu(active=None)">
354 355 <%
355 356 ## determine if we have "any" option available
356 357 can_lock = h.HasRepoPermissionAny('repository.write','repository.admin')(c.repo_name) and c.rhodecode_db_repo.enable_locking
357 358 has_actions = can_lock
358 359
359 360 %>
360 361 % if c.rhodecode_db_repo.archived:
361 362 <div class="alert alert-warning text-center">
362 363 <strong>${_('This repository has been archived. It is now read-only.')}</strong>
363 364 </div>
364 365 % endif
365 366
366 367 <!--- REPO CONTEXT BAR -->
367 368 <div id="context-bar">
368 369 <div class="wrapper">
369 370
370 371 <div class="title">
371 372 ${self.repo_page_title(c.rhodecode_db_repo)}
372 373 </div>
373 374
374 375 <ul id="context-pages" class="navigation horizontal-list">
375 376 <li class="${h.is_active('summary', active)}"><a class="menulink" href="${h.route_path('repo_summary_explicit', repo_name=c.repo_name)}"><div class="menulabel">${_('Summary')}</div></a></li>
376 377 <li class="${h.is_active('commits', active)}"><a class="menulink" href="${h.route_path('repo_commits', repo_name=c.repo_name)}"><div class="menulabel">${_('Commits')}</div></a></li>
377 378 <li class="${h.is_active('files', active)}"><a class="menulink" href="${h.repo_files_by_ref_url(c.repo_name, c.rhodecode_db_repo.repo_type, f_path='', ref_name=c.rhodecode_db_repo.landing_ref_name, commit_id='tip', query={'at':c.rhodecode_db_repo.landing_ref_name})}"><div class="menulabel">${_('Files')}</div></a></li>
378 379 <li class="${h.is_active('compare', active)}"><a class="menulink" href="${h.route_path('repo_compare_select',repo_name=c.repo_name)}"><div class="menulabel">${_('Compare')}</div></a></li>
379 380
380 381 ## TODO: anderson: ideally it would have a function on the scm_instance "enable_pullrequest() and enable_fork()"
381 382 %if c.rhodecode_db_repo.repo_type in ['git','hg']:
382 383 <li class="${h.is_active('showpullrequest', active)}">
383 384 <a class="menulink" href="${h.route_path('pullrequest_show_all', repo_name=c.repo_name)}" title="${h.tooltip(_('Show Pull Requests for %s') % c.repo_name)}">
384 385 <div class="menulabel">
385 386 ${_('Pull Requests')} <span class="menulink-counter">${c.repository_pull_requests}</span>
386 387 </div>
387 388 </a>
388 389 </li>
389 390 %endif
390 391
391 392 <li class="${h.is_active('artifacts', active)}">
392 393 <a class="menulink" href="${h.route_path('repo_artifacts_list',repo_name=c.repo_name)}">
393 394 <div class="menulabel">
394 395 ${_('Artifacts')} <span class="menulink-counter">${c.repository_artifacts}</span>
395 396 </div>
396 397 </a>
397 398 </li>
398 399
399 400 %if not c.rhodecode_db_repo.archived and h.HasRepoPermissionAll('repository.admin')(c.repo_name):
400 401 <li class="${h.is_active('settings', active)}"><a class="menulink" href="${h.route_path('edit_repo',repo_name=c.repo_name)}"><div class="menulabel">${_('Repository Settings')}</div></a></li>
401 402 %endif
402 403
403 404 <li class="${h.is_active('options', active)}">
404 405 % if has_actions:
405 406 <a class="menulink dropdown">
406 407 <div class="menulabel">${_('Options')}<div class="show_more"></div></div>
407 408 </a>
408 409 <ul class="submenu">
409 410 %if can_lock:
410 411 %if c.rhodecode_db_repo.locked[0]:
411 412 <li><a class="locking_del" href="${h.route_path('repo_edit_toggle_locking',repo_name=c.repo_name)}">${_('Unlock Repository')}</a></li>
412 413 %else:
413 414 <li><a class="locking_add" href="${h.route_path('repo_edit_toggle_locking',repo_name=c.repo_name)}">${_('Lock Repository')}</a></li>
414 415 %endif
415 416 %endif
416 417 </ul>
417 418 % endif
418 419 </li>
419 420
420 421 </ul>
421 422 </div>
422 423 <div class="clear"></div>
423 424 </div>
424 425
425 426 <!--- REPO END CONTEXT BAR -->
426 427
427 428 </%def>
428 429
429 430 <%def name="repo_group_page_title(repo_group_instance)">
430 431 <div class="title-content">
431 432 <div class="title-main">
432 433 ## Repository Group icon
433 434 <i class="icon-repo-group"></i>
434 435
435 436 ## repo name with group name
436 437 ${h.breadcrumb_repo_group_link(repo_group_instance)}
437 438 </div>
438 439
439 440 <%namespace name="dt" file="/data_table/_dt_elements.mako"/>
440 441 <div class="repo-group-desc discreet">
441 442 ${dt.repo_group_desc(repo_group_instance.description_safe, repo_group_instance.personal, c.visual.stylify_metatags)}
442 443 </div>
443 444
444 445 </div>
445 446 </%def>
446 447
447 448
448 449 <%def name="repo_group_menu(active=None)">
449 450 <%
450 451 gr_name = c.repo_group.group_name if c.repo_group else None
451 452 # create repositories with write permission on group is set to true
452 453 group_admin = h.HasRepoGroupPermissionAny('group.admin')(gr_name, 'group admin index page')
453 454
454 455 %>
455 456
456 457
457 458 <!--- REPO GROUP CONTEXT BAR -->
458 459 <div id="context-bar">
459 460 <div class="wrapper">
460 461 <div class="title">
461 462 ${self.repo_group_page_title(c.repo_group)}
462 463 </div>
463 464
464 465 <ul id="context-pages" class="navigation horizontal-list">
465 466 <li class="${h.is_active('home', active)}">
466 467 <a class="menulink" href="${h.route_path('repo_group_home', repo_group_name=c.repo_group.group_name)}"><div class="menulabel">${_('Group Home')}</div></a>
467 468 </li>
468 469 % if c.is_super_admin or group_admin:
469 470 <li class="${h.is_active('settings', active)}">
470 471 <a class="menulink" href="${h.route_path('edit_repo_group',repo_group_name=c.repo_group.group_name)}" title="${_('You have admin right to this group, and can edit it')}"><div class="menulabel">${_('Group Settings')}</div></a>
471 472 </li>
472 473 % endif
473 474
474 475 </ul>
475 476 </div>
476 477 <div class="clear"></div>
477 478 </div>
478 479
479 480 <!--- REPO GROUP CONTEXT BAR -->
480 481
481 482 </%def>
482 483
483 484
484 485 <%def name="usermenu(active=False)">
485 486 <%
486 487 not_anonymous = c.rhodecode_user.username != h.DEFAULT_USER
487 488
488 489 gr_name = c.repo_group.group_name if (hasattr(c, 'repo_group') and c.repo_group) else None
489 490 # create repositories with write permission on group is set to true
490 491
491 492 can_fork = c.is_super_admin or h.HasPermissionAny('hg.fork.repository')()
492 493 create_on_write = h.HasPermissionAny('hg.create.write_on_repogroup.true')()
493 494 group_write = h.HasRepoGroupPermissionAny('group.write')(gr_name, 'can write into group index page')
494 495 group_admin = h.HasRepoGroupPermissionAny('group.admin')(gr_name, 'group admin index page')
495 496
496 497 can_create_repos = c.is_super_admin or c.can_create_repo
497 498 can_create_repo_groups = c.is_super_admin or c.can_create_repo_group
498 499
499 500 can_create_repos_in_group = c.is_super_admin or group_admin or (group_write and create_on_write)
500 501 can_create_repo_groups_in_group = c.is_super_admin or group_admin
501 502 %>
502 503
503 504 % if not_anonymous:
504 505 <%
505 506 default_target_group = dict()
506 507 if c.rhodecode_user.personal_repo_group:
507 508 default_target_group = dict(parent_group=c.rhodecode_user.personal_repo_group.group_id)
508 509 %>
509 510
510 511 ## create action
511 512 <li>
512 513 <a href="#create-actions" onclick="return false;" class="menulink childs">
513 514 <i class="icon-plus-circled"></i>
514 515 </a>
515 516
516 517 <div class="action-menu submenu">
517 518
518 519 <ol>
519 520 ## scope of within a repository
520 521 % if hasattr(c, 'rhodecode_db_repo') and c.rhodecode_db_repo:
521 522 <li class="submenu-title">${_('This Repository')}</li>
522 523 <li>
523 524 <a href="${h.route_path('pullrequest_new',repo_name=c.repo_name)}">${_('Create Pull Request')}</a>
524 525 </li>
525 526 % if can_fork:
526 527 <li>
527 528 <a href="${h.route_path('repo_fork_new',repo_name=c.repo_name,_query=default_target_group)}">${_('Fork this repository')}</a>
528 529 </li>
529 530 % endif
530 531 % endif
531 532
532 533 ## scope of within repository groups
533 534 % if hasattr(c, 'repo_group') and c.repo_group and (can_create_repos_in_group or can_create_repo_groups_in_group):
534 535 <li class="submenu-title">${_('This Repository Group')}</li>
535 536
536 537 % if can_create_repos_in_group:
537 538 <li>
538 539 <a href="${h.route_path('repo_new',_query=dict(parent_group=c.repo_group.group_id))}">${_('New Repository')}</a>
539 540 </li>
540 541 % endif
541 542
542 543 % if can_create_repo_groups_in_group:
543 544 <li>
544 545 <a href="${h.route_path('repo_group_new',_query=dict(parent_group=c.repo_group.group_id))}">${_(u'New Repository Group')}</a>
545 546 </li>
546 547 % endif
547 548 % endif
548 549
549 550 ## personal group
550 551 % if c.rhodecode_user.personal_repo_group:
551 552 <li class="submenu-title">Personal Group</li>
552 553
553 554 <li>
554 555 <a href="${h.route_path('repo_new',_query=dict(parent_group=c.rhodecode_user.personal_repo_group.group_id))}" >${_('New Repository')} </a>
555 556 </li>
556 557
557 558 <li>
558 559 <a href="${h.route_path('repo_group_new',_query=dict(parent_group=c.rhodecode_user.personal_repo_group.group_id))}">${_('New Repository Group')} </a>
559 560 </li>
560 561 % endif
561 562
562 563 ## Global actions
563 564 <li class="submenu-title">RhodeCode</li>
564 565 % if can_create_repos:
565 566 <li>
566 567 <a href="${h.route_path('repo_new')}" >${_('New Repository')}</a>
567 568 </li>
568 569 % endif
569 570
570 571 % if can_create_repo_groups:
571 572 <li>
572 573 <a href="${h.route_path('repo_group_new')}" >${_(u'New Repository Group')}</a>
573 574 </li>
574 575 % endif
575 576
576 577 <li>
577 578 <a href="${h.route_path('gists_new')}">${_(u'New Gist')}</a>
578 579 </li>
579 580
580 581 </ol>
581 582
582 583 </div>
583 584 </li>
584 585
585 586 ## notifications
586 587 <li>
587 588 <a class="${('empty' if c.unread_notifications == 0 else '')}" href="${h.route_path('notifications_show_all')}">
588 589 ${c.unread_notifications}
589 590 </a>
590 591 </li>
591 592 % endif
592 593
593 594 ## USER MENU
594 595 <li id="quick_login_li" class="${'active' if active else ''}">
595 596 % if c.rhodecode_user.username == h.DEFAULT_USER:
596 597 <a id="quick_login_link" class="menulink childs" href="${h.route_path('login', _query={'came_from': h.current_route_path(request)})}">
597 598 ${gravatar(c.rhodecode_user.email, 20)}
598 599 <span class="user">
599 600 <span>${_('Sign in')}</span>
600 601 </span>
601 602 </a>
602 603 % else:
603 604 ## logged in user
604 605 <a id="quick_login_link" class="menulink childs">
605 606 ${gravatar(c.rhodecode_user.email, 20)}
606 607 <span class="user">
607 608 <span class="menu_link_user">${c.rhodecode_user.username}</span>
608 609 <div class="show_more"></div>
609 610 </span>
610 611 </a>
611 612 ## subnav with menu for logged in user
612 613 <div class="user-menu submenu">
613 614 <div id="quick_login">
614 615 %if c.rhodecode_user.username != h.DEFAULT_USER:
615 616 <div class="">
616 617 <div class="big_gravatar">${gravatar(c.rhodecode_user.email, 48)}</div>
617 618 <div class="full_name">${c.rhodecode_user.full_name_or_username}</div>
618 619 <div class="email">${c.rhodecode_user.email}</div>
619 620 </div>
620 621 <div class="">
621 622 <ol class="links">
622 623 <li>${h.link_to(_(u'My account'),h.route_path('my_account_profile'))}</li>
623 624 % if c.rhodecode_user.personal_repo_group:
624 625 <li>${h.link_to(_(u'My personal group'), h.route_path('repo_group_home', repo_group_name=c.rhodecode_user.personal_repo_group.group_name))}</li>
625 626 % endif
626 627 <li>${h.link_to(_(u'Pull Requests'), h.route_path('my_account_pullrequests'))}</li>
627 628
628 629 % if c.debug_style:
629 630 <li>
630 631 <a class="menulink" title="${_('Style')}" href="${h.route_path('debug_style_home')}">
631 632 <div class="menulabel">${_('[Style]')}</div>
632 633 </a>
633 634 </li>
634 635 % endif
635 636
636 637 ## bookmark-items
637 638 <li class="bookmark-items">
638 639 ${_('Bookmarks')}
639 640 <div class="pull-right">
640 641 <a href="${h.route_path('my_account_bookmarks')}">
641 642
642 643 <i class="icon-cog"></i>
643 644 </a>
644 645 </div>
645 646 </li>
646 647 % if not c.bookmark_items:
647 648 <li>
648 649 <a href="${h.route_path('my_account_bookmarks')}">${_('No Bookmarks yet.')}</a>
649 650 </li>
650 651 % endif
651 652 % for item in c.bookmark_items:
652 653 <li>
653 654 % if item.repository:
654 655 <div>
655 656 <a class="bookmark-item" href="${h.route_path('my_account_goto_bookmark', bookmark_id=item.position)}">
656 657 <code>${item.position}</code>
657 658 % if item.repository.repo_type == 'hg':
658 659 <i class="icon-hg" title="${_('Repository')}" style="font-size: 16px"></i>
659 660 % elif item.repository.repo_type == 'git':
660 661 <i class="icon-git" title="${_('Repository')}" style="font-size: 16px"></i>
661 662 % elif item.repository.repo_type == 'svn':
662 663 <i class="icon-svn" title="${_('Repository')}" style="font-size: 16px"></i>
663 664 % endif
664 665 ${(item.title or h.shorter(item.repository.repo_name, 30))}
665 666 </a>
666 667 </div>
667 668 % elif item.repository_group:
668 669 <div>
669 670 <a class="bookmark-item" href="${h.route_path('my_account_goto_bookmark', bookmark_id=item.position)}">
670 671 <code>${item.position}</code>
671 672 <i class="icon-repo-group" title="${_('Repository group')}" style="font-size: 14px"></i>
672 673 ${(item.title or h.shorter(item.repository_group.group_name, 30))}
673 674 </a>
674 675 </div>
675 676 % else:
676 677 <a class="bookmark-item" href="${h.route_path('my_account_goto_bookmark', bookmark_id=item.position)}">
677 678 <code>${item.position}</code>
678 679 ${item.title}
679 680 </a>
680 681 % endif
681 682 </li>
682 683 % endfor
683 684
684 685 <li class="logout">
685 686 ${h.secure_form(h.route_path('logout'), request=request)}
686 687 ${h.submit('log_out', _(u'Sign Out'),class_="btn btn-primary")}
687 688 ${h.end_form()}
688 689 </li>
689 690 </ol>
690 691 </div>
691 692 %endif
692 693 </div>
693 694 </div>
694 695
695 696 % endif
696 697 </li>
697 698 </%def>
698 699
699 700 <%def name="menu_items(active=None)">
700 701 <%
701 702 notice_messages, notice_level = c.rhodecode_user.get_notice_messages()
702 703 notice_display = 'none' if len(notice_messages) == 0 else ''
703 704 %>
704 705
705 706 <ul id="quick" class="main_nav navigation horizontal-list">
706 707 ## notice box for important system messages
707 708 <li style="display: ${notice_display}">
708 709 <a class="notice-box" href="#openNotice" onclick="$('.notice-messages-container').toggle(); return false">
709 710 <div class="menulabel-notice ${notice_level}" >
710 711 ${len(notice_messages)}
711 712 </div>
712 713 </a>
713 714 </li>
714 715 <div class="notice-messages-container" style="display: none">
715 716 <div class="notice-messages">
716 717 <table class="rctable">
717 718 % for notice in notice_messages:
718 719 <tr id="notice-message-${notice['msg_id']}" class="notice-message-${notice['level']}">
719 720 <td style="vertical-align: text-top; width: 20px">
720 721 <i class="tooltip icon-info notice-color-${notice['level']}" title="${notice['level']}"></i>
721 722 </td>
722 723 <td>
723 724 <span><i class="icon-plus-squared cursor-pointer" onclick="$('#notice-${notice['msg_id']}').toggle()"></i> </span>
724 725 ${notice['subject']}
725 726
726 727 <div id="notice-${notice['msg_id']}" style="display: none">
727 728 ${h.render(notice['body'], renderer='markdown')}
728 729 </div>
729 730 </td>
730 731 <td style="vertical-align: text-top; width: 35px;">
731 732 <a class="tooltip" title="${_('dismiss')}" href="#dismiss" onclick="dismissNotice(${notice['msg_id']});return false">
732 733 <i class="icon-remove icon-filled-red"></i>
733 734 </a>
734 735 </td>
735 736 </tr>
736 737
737 738 % endfor
738 739 </table>
739 740 </div>
740 741 </div>
741 742 ## Main filter
742 743 <li>
743 744 <div class="menulabel main_filter_box">
744 745 <div class="main_filter_input_box">
745 746 <ul class="searchItems">
746 747
747 748 <li class="searchTag searchTagIcon">
748 749 <i class="icon-search"></i>
749 750 </li>
750 751
751 752 % if c.template_context['search_context']['repo_id']:
752 753 <li class="searchTag searchTagFilter searchTagHidable" >
753 754 ##<a href="${h.route_path('search_repo',repo_name=c.template_context['search_context']['repo_name'])}">
754 755 <span class="tag">
755 756 This repo
756 757 <a href="#removeGoToFilter" onclick="removeGoToFilter(); return false"><i class="icon-cancel-circled"></i></a>
757 758 </span>
758 759 ##</a>
759 760 </li>
760 761 % elif c.template_context['search_context']['repo_group_id']:
761 762 <li class="searchTag searchTagFilter searchTagHidable">
762 763 ##<a href="${h.route_path('search_repo_group',repo_group_name=c.template_context['search_context']['repo_group_name'])}">
763 764 <span class="tag">
764 765 This group
765 766 <a href="#removeGoToFilter" onclick="removeGoToFilter(); return false"><i class="icon-cancel-circled"></i></a>
766 767 </span>
767 768 ##</a>
768 769 </li>
769 770 % endif
770 771
771 772 <li class="searchTagInput">
772 773 <input class="main_filter_input" id="main_filter" size="25" type="text" name="main_filter" placeholder="${_('search / go to...')}" value="" />
773 774 </li>
774 775 <li class="searchTag searchTagHelp">
775 776 <a href="#showFilterHelp" onclick="showMainFilterBox(); return false">?</a>
776 777 </li>
777 778 </ul>
778 779 </div>
779 780 </div>
780 781
781 782 <div id="main_filter_help" style="display: none">
782 783 - Use '/' key to quickly access this field.
783 784
784 785 - Enter a name of repository, or repository group for quick search.
785 786
786 787 - Prefix query to allow special search:
787 788
788 789 <strong>user:</strong>admin, to search for usernames, always global
789 790
790 791 <strong>user_group:</strong>devops, to search for user groups, always global
791 792
792 793 <strong>pr:</strong>303, to search for pull request number, title, or description, always global
793 794
794 795 <strong>commit:</strong>efced4, to search for commits, scoped to repositories or groups
795 796
796 797 <strong>file:</strong>models.py, to search for file paths, scoped to repositories or groups
797 798
798 799 % if c.template_context['search_context']['repo_id']:
799 800 For advanced full text search visit: <a href="${h.route_path('search_repo',repo_name=c.template_context['search_context']['repo_name'])}">repository search</a>
800 801 % elif c.template_context['search_context']['repo_group_id']:
801 802 For advanced full text search visit: <a href="${h.route_path('search_repo_group',repo_group_name=c.template_context['search_context']['repo_group_name'])}">repository group search</a>
802 803 % else:
803 804 For advanced full text search visit: <a href="${h.route_path('search')}">global search</a>
804 805 % endif
805 806 </div>
806 807 </li>
807 808
808 809 ## ROOT MENU
809 810 <li class="${h.is_active('home', active)}">
810 811 <a class="menulink" title="${_('Home')}" href="${h.route_path('home')}">
811 812 <div class="menulabel">${_('Home')}</div>
812 813 </a>
813 814 </li>
814 815
815 816 %if c.rhodecode_user.username != h.DEFAULT_USER:
816 817 <li class="${h.is_active('journal', active)}">
817 818 <a class="menulink" title="${_('Show activity journal')}" href="${h.route_path('journal')}">
818 819 <div class="menulabel">${_('Journal')}</div>
819 820 </a>
820 821 </li>
821 822 %else:
822 823 <li class="${h.is_active('journal', active)}">
823 824 <a class="menulink" title="${_('Show Public activity journal')}" href="${h.route_path('journal_public')}">
824 825 <div class="menulabel">${_('Public journal')}</div>
825 826 </a>
826 827 </li>
827 828 %endif
828 829
829 830 <li class="${h.is_active('gists', active)}">
830 831 <a class="menulink childs" title="${_('Show Gists')}" href="${h.route_path('gists_show')}">
831 832 <div class="menulabel">${_('Gists')}</div>
832 833 </a>
833 834 </li>
834 835
835 836 % if c.is_super_admin or c.is_delegated_admin:
836 837 <li class="${h.is_active('admin', active)}">
837 838 <a class="menulink childs" title="${_('Admin settings')}" href="${h.route_path('admin_home')}">
838 839 <div class="menulabel">${_('Admin')} </div>
839 840 </a>
840 841 </li>
841 842 % endif
842 843
843 844 ## render extra user menu
844 845 ${usermenu(active=(active=='my_account'))}
845 846
846 847 </ul>
847 848
848 849 <script type="text/javascript">
849 850 var visualShowPublicIcon = "${c.visual.show_public_icon}" == "True";
850 851
851 852 var formatRepoResult = function(result, container, query, escapeMarkup) {
852 853 return function(data, escapeMarkup) {
853 854 if (!data.repo_id){
854 855 return data.text; // optgroup text Repositories
855 856 }
856 857
857 858 var tmpl = '';
858 859 var repoType = data['repo_type'];
859 860 var repoName = data['text'];
860 861
861 862 if(data && data.type == 'repo'){
862 863 if(repoType === 'hg'){
863 864 tmpl += '<i class="icon-hg"></i> ';
864 865 }
865 866 else if(repoType === 'git'){
866 867 tmpl += '<i class="icon-git"></i> ';
867 868 }
868 869 else if(repoType === 'svn'){
869 870 tmpl += '<i class="icon-svn"></i> ';
870 871 }
871 872 if(data['private']){
872 873 tmpl += '<i class="icon-lock" ></i> ';
873 874 }
874 875 else if(visualShowPublicIcon){
875 876 tmpl += '<i class="icon-unlock-alt"></i> ';
876 877 }
877 878 }
878 879 tmpl += escapeMarkup(repoName);
879 880 return tmpl;
880 881
881 882 }(result, escapeMarkup);
882 883 };
883 884
884 885 var formatRepoGroupResult = function(result, container, query, escapeMarkup) {
885 886 return function(data, escapeMarkup) {
886 887 if (!data.repo_group_id){
887 888 return data.text; // optgroup text Repositories
888 889 }
889 890
890 891 var tmpl = '';
891 892 var repoGroupName = data['text'];
892 893
893 894 if(data){
894 895
895 896 tmpl += '<i class="icon-repo-group"></i> ';
896 897
897 898 }
898 899 tmpl += escapeMarkup(repoGroupName);
899 900 return tmpl;
900 901
901 902 }(result, escapeMarkup);
902 903 };
903 904
904 905 var escapeRegExChars = function (value) {
905 906 return value.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&");
906 907 };
907 908
908 909 var getRepoIcon = function(repo_type) {
909 910 if (repo_type === 'hg') {
910 911 return '<i class="icon-hg"></i> ';
911 912 }
912 913 else if (repo_type === 'git') {
913 914 return '<i class="icon-git"></i> ';
914 915 }
915 916 else if (repo_type === 'svn') {
916 917 return '<i class="icon-svn"></i> ';
917 918 }
918 919 return ''
919 920 };
920 921
921 922 var autocompleteMainFilterFormatResult = function (data, value, org_formatter) {
922 923
923 924 if (value.split(':').length === 2) {
924 925 value = value.split(':')[1]
925 926 }
926 927
927 928 var searchType = data['type'];
928 929 var searchSubType = data['subtype'];
929 930 var valueDisplay = data['value_display'];
930 931 var valueIcon = data['value_icon'];
931 932
932 933 var pattern = '(' + escapeRegExChars(value) + ')';
933 934
934 935 valueDisplay = Select2.util.escapeMarkup(valueDisplay);
935 936
936 937 // highlight match
937 938 if (searchType != 'text') {
938 939 valueDisplay = valueDisplay.replace(new RegExp(pattern, 'gi'), '<strong>$1<\/strong>');
939 940 }
940 941
941 942 var icon = '';
942 943
943 944 if (searchType === 'hint') {
944 945 icon += '<i class="icon-repo-group"></i> ';
945 946 }
946 947 // full text search/hints
947 948 else if (searchType === 'search') {
948 949 if (valueIcon === undefined) {
949 950 icon += '<i class="icon-more"></i> ';
950 951 } else {
951 952 icon += valueIcon + ' ';
952 953 }
953 954
954 955 if (searchSubType !== undefined && searchSubType == 'repo') {
955 956 valueDisplay += '<div class="pull-right tag">repository</div>';
956 957 }
957 958 else if (searchSubType !== undefined && searchSubType == 'repo_group') {
958 959 valueDisplay += '<div class="pull-right tag">repo group</div>';
959 960 }
960 961 }
961 962 // repository
962 963 else if (searchType === 'repo') {
963 964
964 965 var repoIcon = getRepoIcon(data['repo_type']);
965 966 icon += repoIcon;
966 967
967 968 if (data['private']) {
968 969 icon += '<i class="icon-lock" ></i> ';
969 970 }
970 971 else if (visualShowPublicIcon) {
971 972 icon += '<i class="icon-unlock-alt"></i> ';
972 973 }
973 974 }
974 975 // repository groups
975 976 else if (searchType === 'repo_group') {
976 977 icon += '<i class="icon-repo-group"></i> ';
977 978 }
978 979 // user group
979 980 else if (searchType === 'user_group') {
980 981 icon += '<i class="icon-group"></i> ';
981 982 }
982 983 // user
983 984 else if (searchType === 'user') {
984 985 icon += '<img class="gravatar" src="{0}"/>'.format(data['icon_link']);
985 986 }
986 987 // pull request
987 988 else if (searchType === 'pull_request') {
988 989 icon += '<i class="icon-merge"></i> ';
989 990 }
990 991 // commit
991 992 else if (searchType === 'commit') {
992 993 var repo_data = data['repo_data'];
993 994 var repoIcon = getRepoIcon(repo_data['repository_type']);
994 995 if (repoIcon) {
995 996 icon += repoIcon;
996 997 } else {
997 998 icon += '<i class="icon-tag"></i>';
998 999 }
999 1000 }
1000 1001 // file
1001 1002 else if (searchType === 'file') {
1002 1003 var repo_data = data['repo_data'];
1003 1004 var repoIcon = getRepoIcon(repo_data['repository_type']);
1004 1005 if (repoIcon) {
1005 1006 icon += repoIcon;
1006 1007 } else {
1007 1008 icon += '<i class="icon-tag"></i>';
1008 1009 }
1009 1010 }
1010 1011 // generic text
1011 1012 else if (searchType === 'text') {
1012 1013 icon = '';
1013 1014 }
1014 1015
1015 1016 var tmpl = '<div class="ac-container-wrap">{0}{1}</div>';
1016 1017 return tmpl.format(icon, valueDisplay);
1017 1018 };
1018 1019
1019 1020 var handleSelect = function(element, suggestion) {
1020 1021 if (suggestion.type === "hint") {
1021 1022 // we skip action
1022 1023 $('#main_filter').focus();
1023 1024 }
1024 1025 else if (suggestion.type === "text") {
1025 1026 // we skip action
1026 1027 $('#main_filter').focus();
1027 1028
1028 1029 } else {
1029 1030 window.location = suggestion['url'];
1030 1031 }
1031 1032 };
1032 1033
1033 1034 var autocompleteMainFilterResult = function (suggestion, originalQuery, queryLowerCase) {
1034 1035 if (queryLowerCase.split(':').length === 2) {
1035 1036 queryLowerCase = queryLowerCase.split(':')[1]
1036 1037 }
1037 1038 if (suggestion.type === "text") {
1038 1039 // special case we don't want to "skip" display for
1039 1040 return true
1040 1041 }
1041 1042 return suggestion.value_display.toLowerCase().indexOf(queryLowerCase) !== -1;
1042 1043 };
1043 1044
1044 1045 var cleanContext = {
1045 1046 repo_view_type: null,
1046 1047
1047 1048 repo_id: null,
1048 1049 repo_name: "",
1049 1050
1050 1051 repo_group_id: null,
1051 1052 repo_group_name: null
1052 1053 };
1053 1054 var removeGoToFilter = function () {
1054 1055 $('.searchTagHidable').hide();
1055 1056 $('#main_filter').autocomplete(
1056 1057 'setOptions', {params:{search_context: cleanContext}});
1057 1058 };
1058 1059
1059 1060 $('#main_filter').autocomplete({
1060 1061 serviceUrl: pyroutes.url('goto_switcher_data'),
1061 1062 params: {
1062 1063 "search_context": templateContext.search_context
1063 1064 },
1064 1065 minChars:2,
1065 1066 maxHeight:400,
1066 1067 deferRequestBy: 300, //miliseconds
1067 1068 tabDisabled: true,
1068 1069 autoSelectFirst: false,
1069 1070 containerClass: 'autocomplete-qfilter-suggestions',
1070 1071 formatResult: autocompleteMainFilterFormatResult,
1071 1072 lookupFilter: autocompleteMainFilterResult,
1072 1073 onSelect: function (element, suggestion) {
1073 1074 handleSelect(element, suggestion);
1074 1075 return false;
1075 1076 },
1076 1077 onSearchError: function (element, query, jqXHR, textStatus, errorThrown) {
1077 1078 if (jqXHR !== 'abort') {
1078 1079 var message = formatErrorMessage(jqXHR, textStatus, errorThrown);
1079 1080 SwalNoAnimation.fire({
1080 1081 icon: 'error',
1081 1082 title: _gettext('Error during search operation'),
1082 1083 html: '<span style="white-space: pre-line">{0}</span>'.format(message),
1083 1084 }).then(function(result) {
1084 1085 window.location.reload();
1085 1086 })
1086 1087 }
1087 1088 },
1088 1089 onSearchStart: function (params) {
1089 1090 $('.searchTag.searchTagIcon').html('<i class="icon-spin animate-spin"></i>')
1090 1091 },
1091 1092 onSearchComplete: function (query, suggestions) {
1092 1093 $('.searchTag.searchTagIcon').html('<i class="icon-search"></i>')
1093 1094 },
1094 1095 });
1095 1096
1096 1097 showMainFilterBox = function () {
1097 1098 $('#main_filter_help').toggle();
1098 1099 };
1099 1100
1100 1101 $('#main_filter').on('keydown.autocomplete', function (e) {
1101 1102
1102 1103 var BACKSPACE = 8;
1103 1104 var el = $(e.currentTarget);
1104 1105 if(e.which === BACKSPACE){
1105 1106 var inputVal = el.val();
1106 1107 if (inputVal === ""){
1107 1108 removeGoToFilter()
1108 1109 }
1109 1110 }
1110 1111 });
1111 1112
1112 1113 var dismissNotice = function(noticeId) {
1113 1114
1114 1115 var url = pyroutes.url('user_notice_dismiss',
1115 1116 {"user_id": templateContext.rhodecode_user.user_id});
1116 1117
1117 1118 var postData = {
1118 1119 'csrf_token': CSRF_TOKEN,
1119 1120 'notice_id': noticeId,
1120 1121 };
1121 1122
1122 1123 var success = function(response) {
1123 1124 $('#notice-message-' + noticeId).remove();
1124 1125 return false;
1125 1126 };
1126 1127 var failure = function(data, textStatus, xhr) {
1127 1128 alert("error processing request: " + textStatus);
1128 1129 return false;
1129 1130 };
1130 1131 ajaxPOST(url, postData, success, failure);
1131 1132 }
1132 1133
1133 1134 var hideLicenseWarning = function () {
1134 1135 var fingerprint = templateContext.session_attrs.license_fingerprint;
1135 1136 storeUserSessionAttr('rc_user_session_attr.hide_license_warning', fingerprint);
1136 1137 $('#notifications').hide();
1137 1138 }
1138 1139
1139 1140 var hideLicenseError = function () {
1140 1141 var fingerprint = templateContext.session_attrs.license_fingerprint;
1141 1142 storeUserSessionAttr('rc_user_session_attr.hide_license_error', fingerprint);
1142 1143 $('#notifications').hide();
1143 1144 }
1144 1145
1145 1146 </script>
1146 1147 <script src="${h.asset('js/rhodecode/base/keyboard-bindings.js', ver=c.rhodecode_version_hash)}"></script>
1147 1148 </%def>
1148 1149
1149 1150 <div class="modal" id="help_kb" tabindex="-1" role="dialog" aria-labelledby="myModalLabel" aria-hidden="true">
1150 1151 <div class="modal-dialog">
1151 1152 <div class="modal-content">
1152 1153 <div class="modal-header">
1153 1154 <button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button>
1154 1155 <h4 class="modal-title" id="myModalLabel">${_('Keyboard shortcuts')}</h4>
1155 1156 </div>
1156 1157 <div class="modal-body">
1157 1158 <div class="block-left">
1158 1159 <table class="keyboard-mappings">
1159 1160 <tbody>
1160 1161 <tr>
1161 1162 <th></th>
1162 1163 <th>${_('Site-wide shortcuts')}</th>
1163 1164 </tr>
1164 1165 <%
1165 1166 elems = [
1166 1167 ('/', 'Use quick search box'),
1167 1168 ('g h', 'Goto home page'),
1168 1169 ('g g', 'Goto my private gists page'),
1169 1170 ('g G', 'Goto my public gists page'),
1170 1171 ('g 0-9', 'Goto bookmarked items from 0-9'),
1171 1172 ('n r', 'New repository page'),
1172 1173 ('n g', 'New gist page'),
1173 1174 ]
1174 1175 %>
1175 1176 %for key, desc in elems:
1176 1177 <tr>
1177 1178 <td class="keys">
1178 1179 <span class="key tag">${key}</span>
1179 1180 </td>
1180 1181 <td>${desc}</td>
1181 1182 </tr>
1182 1183 %endfor
1183 1184 </tbody>
1184 1185 </table>
1185 1186 </div>
1186 1187 <div class="block-left">
1187 1188 <table class="keyboard-mappings">
1188 1189 <tbody>
1189 1190 <tr>
1190 1191 <th></th>
1191 1192 <th>${_('Repositories')}</th>
1192 1193 </tr>
1193 1194 <%
1194 1195 elems = [
1195 1196 ('g s', 'Goto summary page'),
1196 1197 ('g c', 'Goto changelog page'),
1197 1198 ('g f', 'Goto files page'),
1198 1199 ('g F', 'Goto files page with file search activated'),
1199 1200 ('g p', 'Goto pull requests page'),
1200 1201 ('g o', 'Goto repository settings'),
1201 1202 ('g O', 'Goto repository access permissions settings'),
1202 1203 ('t s', 'Toggle sidebar on some pages'),
1203 1204 ]
1204 1205 %>
1205 1206 %for key, desc in elems:
1206 1207 <tr>
1207 1208 <td class="keys">
1208 1209 <span class="key tag">${key}</span>
1209 1210 </td>
1210 1211 <td>${desc}</td>
1211 1212 </tr>
1212 1213 %endfor
1213 1214 </tbody>
1214 1215 </table>
1215 1216 </div>
1216 1217 </div>
1217 1218 <div class="modal-footer">
1218 1219 </div>
1219 1220 </div><!-- /.modal-content -->
1220 1221 </div><!-- /.modal-dialog -->
1221 1222 </div><!-- /.modal -->
1222 1223
1223 1224
1224 1225 <script type="text/javascript">
1225 1226 (function () {
1226 1227 "use sctrict";
1227 1228
1228 1229 // details block auto-hide menu
1229 1230 $(document).mouseup(function(e) {
1230 1231 var container = $('.details-inline-block');
1231 1232 if (!container.is(e.target) && container.has(e.target).length === 0) {
1232 1233 $('.details-inline-block[open]').removeAttr('open')
1233 1234 }
1234 1235 });
1235 1236
1236 1237 var $sideBar = $('.right-sidebar');
1237 1238 var expanded = $sideBar.hasClass('right-sidebar-expanded');
1238 1239 var sidebarState = templateContext.session_attrs.sidebarState;
1239 1240 var sidebarEnabled = $('aside.right-sidebar').get(0);
1240 1241
1241 1242 if (sidebarState === 'expanded') {
1242 1243 expanded = true
1243 1244 } else if (sidebarState === 'collapsed') {
1244 1245 expanded = false
1245 1246 }
1246 1247 if (sidebarEnabled) {
1247 1248 // show sidebar since it's hidden on load
1248 1249 $('.right-sidebar').show();
1249 1250
1250 1251 // init based on set initial class, or if defined user session attrs
1251 1252 if (expanded) {
1252 1253 window.expandSidebar();
1253 1254 window.updateStickyHeader();
1254 1255
1255 1256 } else {
1256 1257 window.collapseSidebar();
1257 1258 window.updateStickyHeader();
1258 1259 }
1259 1260 }
1260 1261 })()
1261 1262
1262 1263 </script>
@@ -1,169 +1,169 b''
1 1 ## snippet for displaying default permission box
2 2 ## usage:
3 3 ## <%namespace name="dpb" file="/base/default_perms_box.mako"/>
4 4 ## ${dpb.default_perms_box(<url_to_form>)}
5 5 ## ${dpb.default_perms_radios()}
6 6 <%namespace name="base" file="/base/base.mako"/>
7 7
8 8 <%def name="default_perms_radios(global_permissions_template = False, suffix='', **kwargs)">
9 9 <div class="main-content-full-width">
10 10 <div class="panel panel-default">
11 11
12 12 ## displayed according to checkbox selection
13 13 <div class="panel-heading">
14 14 %if not global_permissions_template:
15 15 <h3 class="inherit_overlay_default panel-title">
16 16 % if hasattr(c, 'user'):
17 17 ${base.gravatar_with_user(c.user.username, 16, tooltip=False, _class='pull-left')} &nbsp;-
18 18 % endif
19 19 ${_('Inherited Permissions')}
20 20 </h3>
21 21 <h3 class="inherit_overlay panel-title">
22 22 % if hasattr(c, 'user'):
23 23 ${base.gravatar_with_user(c.user.username, 16, tooltip=False, _class='pull-left')} &nbsp;-
24 24 % endif
25 25 ${_('Custom Permissions')}
26 26 </h3>
27 27 %else:
28 28 <h3 class="panel-title">
29 29 ${_('Default Global Permissions')}
30 30 </h3>
31 31 %endif
32 32 </div>
33 33
34 34 <div class="panel-body">
35 35 %if global_permissions_template:
36 36 <p>${_('The following options configure the default permissions each user or group will inherit. You can override these permissions for each individual user or user group using individual permissions settings.')}</p>
37 37 %endif
38 38 <div class="field">
39 39 <div class="label">
40 40 <label for="default_repo_create${suffix}">${_('Repository Creation')}:</label>
41 41 </div>
42 42 <div class="radios">
43 43 ${h.radio('default_repo_create' + suffix, c.repo_create_choices[1][0], label=c.repo_create_choices[1][1], **kwargs)}
44 44 ${h.radio('default_repo_create' + suffix, c.repo_create_choices[0][0], label=c.repo_create_choices[0][1], **kwargs)}
45 45 <span class="help-block">${_('Permission to create root level repositories. When disabled, users can still create repositories inside their own repository groups.')}</span>
46 46 </div>
47 47 </div>
48 48 <div class="field">
49 49 <div class="label">
50 50 <label for="default_repo_create_on_write${suffix}">${_('Repository Creation With Group Write Access')}:</label>
51 51 </div>
52 52 <div class="radios">
53 53 ${h.radio('default_repo_create_on_write' + suffix, c.repo_create_on_write_choices[1][0], label=c.repo_create_on_write_choices[1][1], **kwargs)}
54 54 ${h.radio('default_repo_create_on_write' + suffix, c.repo_create_on_write_choices[0][0], label=c.repo_create_on_write_choices[0][1], **kwargs)}
55 55 <span class="help-block">${_('Write permission given on a repository group will allow creating repositories inside that group.')}</span>
56 56 </div>
57 57 </div>
58 58 <div class="field">
59 59 <div class="label">
60 60 <label for="default_fork_create${suffix}">${_('Repository Forking')}:</label>
61 61 </div>
62 62 <div class="radios">
63 63 ${h.radio('default_fork_create' + suffix, c.fork_choices[1][0], label=c.fork_choices[1][1], **kwargs)}
64 64 ${h.radio('default_fork_create' + suffix, c.fork_choices[0][0], label=c.fork_choices[0][1], **kwargs)}
65 <span class="help-block">${_('Permission to create root level repository forks. When disabled, users can still fork repositories inside their own repository groups.')}</span>
65 <span class="help-block">${_('Permission to create repository forks. Root level forks will only work if repository creation is enabled.')}</span>
66 66 </div>
67 67 </div>
68 68 <div class="field">
69 69 <div class="label">
70 70 <label for="default_repo_group_create${suffix}">${_('Repository Group Creation')}:</label>
71 71 </div>
72 72 <div class="radios">
73 73 ${h.radio('default_repo_group_create' + suffix, c.repo_group_create_choices[1][0], label=c.repo_group_create_choices[1][1], **kwargs)}
74 74 ${h.radio('default_repo_group_create' + suffix, c.repo_group_create_choices[0][0], label=c.repo_group_create_choices[0][1], **kwargs)}
75 75 <span class="help-block">${_('Permission to create root level repository groups. When disabled, repository group admins can still create repository subgroups within their repository groups.')}</span>
76 76 </div>
77 77 </div>
78 78 <div class="field">
79 79 <div class="label">
80 80 <label for="default_user_group_create${suffix}">${_('User Group Creation')}:</label>
81 81 </div>
82 82 <div class="radios">
83 83 ${h.radio('default_user_group_create' + suffix, c.user_group_create_choices[1][0], label=c.user_group_create_choices[1][1], **kwargs)}
84 84 ${h.radio('default_user_group_create' + suffix, c.user_group_create_choices[0][0], label=c.user_group_create_choices[0][1], **kwargs)}
85 85 <span class="help-block">${_('Permission to allow user group creation.')}</span>
86 86 </div>
87 87 </div>
88 88
89 89 <div class="field">
90 90 <div class="label">
91 91 <label for="default_inherit_default_permissions${suffix}">${_('Inherit Permissions From The Default User')}:</label>
92 92 </div>
93 93 <div class="radios">
94 94 ${h.radio('default_inherit_default_permissions' + suffix, c.inherit_default_permission_choices[1][0], label=c.inherit_default_permission_choices[1][1], **kwargs)}
95 95 ${h.radio('default_inherit_default_permissions' + suffix, c.inherit_default_permission_choices[0][0], label=c.inherit_default_permission_choices[0][1], **kwargs)}
96 96 <span class="help-block">${_('Inherit default permissions from the default user. Turn off this option to force explicit permissions for users, even if they are more restrictive than the default user permissions.')}</span>
97 97 </div>
98 98 </div>
99 99
100 100 <div class="buttons">
101 101 ${h.submit('save',_('Save'),class_="btn")}
102 102 ${h.reset('reset',_('Reset'),class_="btn")}
103 103 </div>
104 104 </div>
105 105 </div>
106 106 </div>
107 107 </%def>
108 108
109 109 <%def name="default_perms_box(form_url)">
110 110 ${h.secure_form(form_url, request=request)}
111 111 <div class="form">
112 112 <div class="fields">
113 113 <div class="field panel panel-default panel-body">
114 114 <div class="label label-checkbox">
115 115 <label for="inherit_default_permissions">${_('Inherit from default settings')}:</label>
116 116 </div>
117 117 <div class="checkboxes">
118 118 ${h.checkbox('inherit_default_permissions',value=True)}
119 119 <span class="help-block">
120 120 ${h.literal(_('Select to inherit permissions from %s permissions settings, '
121 121 'including default IP address whitelist and inheritance of \npermission by members of user groups.')
122 122 % h.link_to('default user', h.route_path('admin_permissions_global')))}
123 123 </span>
124 124 </div>
125 125 </div>
126 126
127 127 ## INHERITED permissions == the user permissions in admin
128 128 ## if inherit checkbox is set this is displayed in non-edit mode
129 129 <div class="inherit_overlay_default">
130 130 ${default_perms_radios(global_permissions_template = False, suffix='_inherited', disabled="disabled")}
131 131 </div>
132 132
133 133 ## CUSTOM permissions
134 134 <div class="inherit_overlay">
135 135 ${default_perms_radios(global_permissions_template = False)}
136 136 </div>
137 137 </div>
138 138 </div>
139 139 ${h.end_form()}
140 140
141 141
142 142 ## JS
143 143 <script>
144 144 var show_custom_perms = function(inherit_default){
145 145 if(inherit_default) {
146 146 $('.inherit_overlay_default').show();
147 147 $('.inherit_overlay').hide();
148 148 }
149 149 else {
150 150 $('.inherit_overlay').show();
151 151 $('.inherit_overlay_default').hide();
152 152 }
153 153 };
154 154 $(document).ready(function(e){
155 155 var inherit_checkbox = $('#inherit_default_permissions');
156 156 var defaults = inherit_checkbox.prop('checked');
157 157 show_custom_perms(defaults);
158 158 inherit_checkbox.on('change', function(){
159 159 if($(this).prop('checked')){
160 160 show_custom_perms(true);
161 161 }
162 162 else{
163 163 show_custom_perms(false);
164 164 }
165 165 })
166 166 })
167 167 </script>
168 168
169 169 </%def>
@@ -1,88 +1,88 b''
1 1 ## Changesets table !
2 2 <%namespace name="base" file="/base/base.mako"/>
3 3
4 4 %if c.ancestor:
5 5 <div class="ancestor">${_('Compare was calculated based on this common ancestor commit')}:
6 6 <a href="${h.route_path('repo_commit', repo_name=c.repo_name, commit_id=c.ancestor)}">${h.short_id(c.ancestor)}</a>
7 7 <input id="common_ancestor" type="hidden" name="common_ancestor" value="${c.ancestor}">
8 8 </div>
9 9 %endif
10 10
11 11 <div class="container">
12 12 <input type="hidden" name="__start__" value="revisions:sequence">
13 13 <table class="rctable compare_view_commits">
14 14 <tr>
15 15 % if hasattr(c, 'commit_versions'):
16 16 <th>ver</th>
17 17 % endif
18 18 <th>${_('Time')}</th>
19 19 <th>${_('Author')}</th>
20 20 <th>${_('Commit')}</th>
21 21 <th></th>
22 22 <th>${_('Description')}</th>
23 23 </tr>
24 24 ## to speed up lookups, cache some functions before the loop
25 25 <%
26 26 active_patterns = h.get_active_pattern_entries(c.repo_name)
27 urlify_commit_message = h.partial(h.urlify_commit_message, active_pattern_entries=active_patterns, issues_container=getattr(c, 'referenced_commit_issues', None))
27 urlify_commit_message = h.partial(h.urlify_commit_message, active_pattern_entries=active_patterns)
28 28 %>
29 29
30 30 %for commit in c.commit_ranges:
31 31 <tr id="row-${commit.raw_id}"
32 32 commit_id="${commit.raw_id}"
33 33 class="compare_select"
34 34 style="${'display: none' if c.collapse_all_commits else ''}"
35 35 >
36 36 % if hasattr(c, 'commit_versions'):
37 37 <td class="tooltip" title="${_('Pull request version this commit was introduced')}">
38 38 <code>${('v{}'.format(c.commit_versions[commit.raw_id][0]) if c.commit_versions[commit.raw_id] else 'latest')}</code>
39 39 </td>
40 40 % endif
41 41 <td class="td-time">
42 42 ${h.age_component(commit.date)}
43 43 </td>
44 44 <td class="td-user">
45 45 ${base.gravatar_with_user(commit.author, 16, tooltip=True)}
46 46 </td>
47 47 <td class="td-hash">
48 48 <code>
49 49 <a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=commit.raw_id)}">
50 50 r${commit.idx}:${h.short_id(commit.raw_id)}
51 51 </a>
52 52 ${h.hidden('revisions',commit.raw_id)}
53 53 </code>
54 54 </td>
55 55 <td class="td-message expand_commit" data-commit-id="${commit.raw_id}" title="${_('Expand commit message')}" onclick="commitsController.expandCommit(this); return false">
56 56 <i class="icon-expand-linked"></i>
57 57 </td>
58 58 <td class="mid td-description">
59 59 <div class="log-container truncate-wrap">
60 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">${urlify_commit_message(commit.message, c.repo_name)}</div>
60 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">${urlify_commit_message(commit.message, c.repo_name, issues_container_callback=getattr(c, 'referenced_commit_issues', h.IssuesRegistry())(commit.serialize()))}</div>
61 61 </div>
62 62 </td>
63 63 </tr>
64 64 %endfor
65 65 <tr class="compare_select_hidden" style="${('' if c.collapse_all_commits else 'display: none')}">
66 66 <td colspan="5">
67 67 ${_ungettext('{} commit hidden, click expand to show them.', '{} commits hidden, click expand to show them.', len(c.commit_ranges)).format(len(c.commit_ranges))}
68 68 </td>
69 69 </tr>
70 70 % if not c.commit_ranges:
71 71 <tr class="compare_select">
72 72 <td colspan="5">
73 73 ${_('No commits in this compare')}
74 74 </td>
75 75 </tr>
76 76 % endif
77 77 </table>
78 78 <input type="hidden" name="__end__" value="revisions:sequence">
79 79
80 80 </div>
81 81
82 82 <script>
83 83 commitsController = new CommitsController();
84 84 $('.compare_select').on('click',function(e){
85 85 var cid = $(this).attr('commit_id');
86 86 $('#row-'+cid).toggleClass('hl', !$('#row-'+cid).hasClass('hl'));
87 87 });
88 88 </script>
@@ -1,492 +1,499 b''
1 1 ## DATA TABLE RE USABLE ELEMENTS
2 2 ## usage:
3 3 ## <%namespace name="dt" file="/data_table/_dt_elements.mako"/>
4 4 <%namespace name="base" file="/base/base.mako"/>
5 5
6 6 <%def name="metatags_help()">
7 7 <table>
8 8 <%
9 9 example_tags = [
10 10 ('state','[stable]'),
11 11 ('state','[stale]'),
12 12 ('state','[featured]'),
13 13 ('state','[dev]'),
14 14 ('state','[dead]'),
15 15 ('state','[deprecated]'),
16 16
17 17 ('label','[personal]'),
18 18 ('generic','[v2.0.0]'),
19 19
20 20 ('lang','[lang =&gt; JavaScript]'),
21 21 ('license','[license =&gt; LicenseName]'),
22 22
23 23 ('ref','[requires =&gt; RepoName]'),
24 24 ('ref','[recommends =&gt; GroupName]'),
25 25 ('ref','[conflicts =&gt; SomeName]'),
26 26 ('ref','[base =&gt; SomeName]'),
27 27 ('url','[url =&gt; [linkName](https://rhodecode.com)]'),
28 28 ('see','[see =&gt; http://rhodecode.com]'),
29 29 ]
30 30 %>
31 31 % for tag_type, tag in example_tags:
32 32 <tr>
33 33 <td>${tag|n}</td>
34 34 <td>${h.style_metatag(tag_type, tag)|n}</td>
35 35 </tr>
36 36 % endfor
37 37 </table>
38 38 </%def>
39 39
40 40 <%def name="render_description(description, stylify_metatags)">
41 41 <%
42 42 tags = []
43 43 if stylify_metatags:
44 44 tags, description = h.extract_metatags(description)
45 45 %>
46 46 % for tag_type, tag in tags:
47 47 ${h.style_metatag(tag_type, tag)|n,trim}
48 48 % endfor
49 49 <code style="white-space: pre-wrap">${description}</code>
50 50 </%def>
51 51
52 52 ## REPOSITORY RENDERERS
53 53 <%def name="quick_menu(repo_name)">
54 54 <i class="icon-more"></i>
55 55 <div class="menu_items_container hidden">
56 56 <ul class="menu_items">
57 57 <li>
58 58 <a title="${_('Summary')}" href="${h.route_path('repo_summary',repo_name=repo_name)}">
59 59 <span>${_('Summary')}</span>
60 60 </a>
61 61 </li>
62 62 <li>
63 63 <a title="${_('Commits')}" href="${h.route_path('repo_commits',repo_name=repo_name)}">
64 64 <span>${_('Commits')}</span>
65 65 </a>
66 66 </li>
67 67 <li>
68 68 <a title="${_('Files')}" href="${h.route_path('repo_files:default_commit',repo_name=repo_name)}">
69 69 <span>${_('Files')}</span>
70 70 </a>
71 71 </li>
72 72 <li>
73 73 <a title="${_('Fork')}" href="${h.route_path('repo_fork_new',repo_name=repo_name)}">
74 74 <span>${_('Fork')}</span>
75 75 </a>
76 76 </li>
77 77 </ul>
78 78 </div>
79 79 </%def>
80 80
81 81 <%def name="repo_name(name,rtype,rstate,private,archived,fork_of,short_name=False,admin=False)">
82 82 <%
83 83 def get_name(name,short_name=short_name):
84 84 if short_name:
85 85 return name.split('/')[-1]
86 86 else:
87 87 return name
88 88 %>
89 89 <div class="${'repo_state_pending' if rstate == 'repo_state_pending' else ''} truncate">
90 90 ##NAME
91 91 <a href="${h.route_path('edit_repo',repo_name=name) if admin else h.route_path('repo_summary',repo_name=name)}">
92 92
93 93 ##TYPE OF REPO
94 94 %if h.is_hg(rtype):
95 95 <span title="${_('Mercurial repository')}"><i class="icon-hg" style="font-size: 14px;"></i></span>
96 96 %elif h.is_git(rtype):
97 97 <span title="${_('Git repository')}"><i class="icon-git" style="font-size: 14px"></i></span>
98 98 %elif h.is_svn(rtype):
99 99 <span title="${_('Subversion repository')}"><i class="icon-svn" style="font-size: 14px"></i></span>
100 100 %endif
101 101
102 102 ##PRIVATE/PUBLIC
103 103 %if private is True and c.visual.show_private_icon:
104 104 <i class="icon-lock" title="${_('Private repository')}"></i>
105 105 %elif private is False and c.visual.show_public_icon:
106 106 <i class="icon-unlock-alt" title="${_('Public repository')}"></i>
107 107 %else:
108 108 <span></span>
109 109 %endif
110 110 ${get_name(name)}
111 111 </a>
112 112 %if fork_of:
113 113 <a href="${h.route_path('repo_summary',repo_name=fork_of.repo_name)}"><i class="icon-code-fork"></i></a>
114 114 %endif
115 115 %if rstate == 'repo_state_pending':
116 116 <span class="creation_in_progress tooltip" title="${_('This repository is being created in a background task')}">
117 117 (${_('creating...')})
118 118 </span>
119 119 %endif
120 120
121 121 </div>
122 122 </%def>
123 123
124 124 <%def name="repo_desc(description, stylify_metatags)">
125 125 <%
126 126 tags, description = h.extract_metatags(description)
127 127 %>
128 128
129 129 <div class="truncate-wrap">
130 130 % if stylify_metatags:
131 131 % for tag_type, tag in tags:
132 132 ${h.style_metatag(tag_type, tag)|n}
133 133 % endfor
134 134 % endif
135 135 ${description}
136 136 </div>
137 137
138 138 </%def>
139 139
140 140 <%def name="last_change(last_change)">
141 141 ${h.age_component(last_change, time_is_local=True)}
142 142 </%def>
143 143
144 144 <%def name="revision(repo_name, rev, commit_id, author, last_msg, commit_date)">
145 145 <div>
146 146 %if rev >= 0:
147 147 <code><a class="tooltip-hovercard" data-hovercard-alt=${h.tooltip(last_msg)} data-hovercard-url="${h.route_path('hovercard_repo_commit', repo_name=repo_name, commit_id=commit_id)}" href="${h.route_path('repo_commit',repo_name=repo_name,commit_id=commit_id)}">${'r{}:{}'.format(rev,h.short_id(commit_id))}</a></code>
148 148 %else:
149 149 ${_('No commits yet')}
150 150 %endif
151 151 </div>
152 152 </%def>
153 153
154 154 <%def name="rss(name)">
155 155 %if c.rhodecode_user.username != h.DEFAULT_USER:
156 156 <a title="${h.tooltip(_('Subscribe to %s rss feed')% name)}" href="${h.route_path('rss_feed_home', repo_name=name, _query=dict(auth_token=c.rhodecode_user.feed_token))}"><i class="icon-rss-sign"></i></a>
157 157 %else:
158 158 <a title="${h.tooltip(_('Subscribe to %s rss feed')% name)}" href="${h.route_path('rss_feed_home', repo_name=name)}"><i class="icon-rss-sign"></i></a>
159 159 %endif
160 160 </%def>
161 161
162 162 <%def name="atom(name)">
163 163 %if c.rhodecode_user.username != h.DEFAULT_USER:
164 164 <a title="${h.tooltip(_('Subscribe to %s atom feed')% name)}" href="${h.route_path('atom_feed_home', repo_name=name, _query=dict(auth_token=c.rhodecode_user.feed_token))}"><i class="icon-rss-sign"></i></a>
165 165 %else:
166 166 <a title="${h.tooltip(_('Subscribe to %s atom feed')% name)}" href="${h.route_path('atom_feed_home', repo_name=name)}"><i class="icon-rss-sign"></i></a>
167 167 %endif
168 168 </%def>
169 169
170 170 <%def name="repo_actions(repo_name, super_user=True)">
171 171 <div>
172 172 <div class="grid_edit">
173 173 <a href="${h.route_path('edit_repo',repo_name=repo_name)}" title="${_('Edit')}">
174 174 Edit
175 175 </a>
176 176 </div>
177 177 <div class="grid_delete">
178 178 ${h.secure_form(h.route_path('edit_repo_advanced_delete', repo_name=repo_name), request=request)}
179 179 <input class="btn btn-link btn-danger" id="remove_${repo_name}" name="remove_${repo_name}"
180 180 onclick="submitConfirm(event, this, _gettext('Confirm to delete this repository'), _gettext('Delete'), '${repo_name}')"
181 181 type="submit" value="Delete"
182 182 >
183 183 ${h.end_form()}
184 184 </div>
185 185 </div>
186 186 </%def>
187 187
188 188 <%def name="repo_state(repo_state)">
189 189 <div>
190 190 %if repo_state == 'repo_state_pending':
191 191 <div class="tag tag4">${_('Creating')}</div>
192 192 %elif repo_state == 'repo_state_created':
193 193 <div class="tag tag1">${_('Created')}</div>
194 194 %else:
195 195 <div class="tag alert2" title="${h.tooltip(repo_state)}">invalid</div>
196 196 %endif
197 197 </div>
198 198 </%def>
199 199
200 200
201 201 ## REPO GROUP RENDERERS
202 202 <%def name="quick_repo_group_menu(repo_group_name)">
203 203 <i class="icon-more"></i>
204 204 <div class="menu_items_container hidden">
205 205 <ul class="menu_items">
206 206 <li>
207 207 <a href="${h.route_path('repo_group_home', repo_group_name=repo_group_name)}">${_('Summary')}</a>
208 208 </li>
209 209
210 210 </ul>
211 211 </div>
212 212 </%def>
213 213
214 214 <%def name="repo_group_name(repo_group_name, children_groups=None)">
215 215 <div>
216 216 <a href="${h.route_path('repo_group_home', repo_group_name=repo_group_name)}">
217 217 <i class="icon-repo-group" title="${_('Repository group')}" style="font-size: 14px"></i>
218 218 %if children_groups:
219 219 ${h.literal(' &raquo; '.join(children_groups))}
220 220 %else:
221 221 ${repo_group_name}
222 222 %endif
223 223 </a>
224 224 </div>
225 225 </%def>
226 226
227 227 <%def name="repo_group_desc(description, personal, stylify_metatags)">
228 228
229 229 <%
230 230 if stylify_metatags:
231 231 tags, description = h.extract_metatags(description)
232 232 %>
233 233
234 234 <div class="truncate-wrap">
235 235 % if personal:
236 236 <div class="metatag" tag="personal">${_('personal')}</div>
237 237 % endif
238 238
239 239 % if stylify_metatags:
240 240 % for tag_type, tag in tags:
241 241 ${h.style_metatag(tag_type, tag)|n}
242 242 % endfor
243 243 % endif
244 244 ${description}
245 245 </div>
246 246
247 247 </%def>
248 248
249 249 <%def name="repo_group_actions(repo_group_id, repo_group_name, gr_count)">
250 250 <div class="grid_edit">
251 251 <a href="${h.route_path('edit_repo_group',repo_group_name=repo_group_name)}" title="${_('Edit')}">Edit</a>
252 252 </div>
253 253 <div class="grid_delete">
254 254 ${h.secure_form(h.route_path('edit_repo_group_advanced_delete', repo_group_name=repo_group_name), request=request)}
255 255 <input class="btn btn-link btn-danger" id="remove_${repo_group_name}" name="remove_${repo_group_name}"
256 256 onclick="submitConfirm(event, this, _gettext('Confirm to delete this repository group'), _gettext('Delete'), '${_ungettext('`{}` with {} repository','`{}` with {} repositories',gr_count).format(repo_group_name, gr_count)}')"
257 257 type="submit" value="Delete"
258 258 >
259 259 ${h.end_form()}
260 260 </div>
261 261 </%def>
262 262
263 263
264 264 <%def name="user_actions(user_id, username)">
265 265 <div class="grid_edit">
266 266 <a href="${h.route_path('user_edit',user_id=user_id)}" title="${_('Edit')}">
267 267 ${_('Edit')}
268 268 </a>
269 269 </div>
270 270 <div class="grid_delete">
271 271 ${h.secure_form(h.route_path('user_delete', user_id=user_id), request=request)}
272 272 <input class="btn btn-link btn-danger" id="remove_user_${user_id}" name="remove_user_${user_id}"
273 273 onclick="submitConfirm(event, this, _gettext('Confirm to delete this user'), _gettext('Delete'), '${username}')"
274 274 type="submit" value="Delete"
275 275 >
276 276 ${h.end_form()}
277 277 </div>
278 278 </%def>
279 279
280 280 <%def name="user_group_actions(user_group_id, user_group_name)">
281 281 <div class="grid_edit">
282 282 <a href="${h.route_path('edit_user_group', user_group_id=user_group_id)}" title="${_('Edit')}">Edit</a>
283 283 </div>
284 284 <div class="grid_delete">
285 285 ${h.secure_form(h.route_path('user_groups_delete', user_group_id=user_group_id), request=request)}
286 286 <input class="btn btn-link btn-danger" id="remove_group_${user_group_id}" name="remove_group_${user_group_id}"
287 287 onclick="submitConfirm(event, this, _gettext('Confirm to delete this user group'), _gettext('Delete'), '${user_group_name}')"
288 288 type="submit" value="Delete"
289 289 >
290 290 ${h.end_form()}
291 291 </div>
292 292 </%def>
293 293
294 294
295 295 <%def name="user_name(user_id, username)">
296 296 ${h.link_to(h.person(username, 'username_or_name_or_email'), h.route_path('user_edit', user_id=user_id))}
297 297 </%def>
298 298
299 299 <%def name="user_profile(username)">
300 300 ${base.gravatar_with_user(username, 16, tooltip=True)}
301 301 </%def>
302 302
303 303 <%def name="user_group_name(user_group_name)">
304 304 <div>
305 305 <i class="icon-user-group" title="${_('User group')}"></i>
306 306 ${h.link_to_group(user_group_name)}
307 307 </div>
308 308 </%def>
309 309
310 310
311 311 ## GISTS
312 312
313 313 <%def name="gist_gravatar(full_contact)">
314 314 <div class="gist_gravatar">
315 315 ${base.gravatar(full_contact, 30)}
316 316 </div>
317 317 </%def>
318 318
319 319 <%def name="gist_access_id(gist_access_id, full_contact)">
320 320 <div>
321 321 <code>
322 322 <a href="${h.route_path('gist_show', gist_id=gist_access_id)}">${gist_access_id}</a>
323 323 </code>
324 324 </div>
325 325 </%def>
326 326
327 327 <%def name="gist_author(full_contact, created_on, expires)">
328 328 ${base.gravatar_with_user(full_contact, 16, tooltip=True)}
329 329 </%def>
330 330
331 331
332 332 <%def name="gist_created(created_on)">
333 333 <div class="created">
334 334 ${h.age_component(created_on, time_is_local=True)}
335 335 </div>
336 336 </%def>
337 337
338 338 <%def name="gist_expires(expires)">
339 339 <div class="created">
340 340 %if expires == -1:
341 341 ${_('never')}
342 342 %else:
343 343 ${h.age_component(h.time_to_utcdatetime(expires))}
344 344 %endif
345 345 </div>
346 346 </%def>
347 347
348 348 <%def name="gist_type(gist_type)">
349 349 %if gist_type == 'public':
350 350 <span class="tag tag-gist-public disabled">${_('Public Gist')}</span>
351 351 %else:
352 352 <span class="tag tag-gist-private disabled">${_('Private Gist')}</span>
353 353 %endif
354 354 </%def>
355 355
356 356 <%def name="gist_description(gist_description)">
357 357 ${gist_description}
358 358 </%def>
359 359
360 360
361 361 ## PULL REQUESTS GRID RENDERERS
362 362
363 363 <%def name="pullrequest_target_repo(repo_name)">
364 364 <div class="truncate">
365 365 ${h.link_to(repo_name,h.route_path('repo_summary',repo_name=repo_name))}
366 366 </div>
367 367 </%def>
368 368
369 369 <%def name="pullrequest_status(status)">
370 370 <i class="icon-circle review-status-${status}"></i>
371 371 </%def>
372 372
373 373 <%def name="pullrequest_title(title, description)">
374 374 ${title}
375 375 </%def>
376 376
377 377 <%def name="pullrequest_comments(comments_nr)">
378 378 <i class="icon-comment"></i> ${comments_nr}
379 379 </%def>
380 380
381 381 <%def name="pullrequest_name(pull_request_id, state, is_wip, target_repo_name, short=False)">
382 382 <code>
383 383 <a href="${h.route_path('pullrequest_show',repo_name=target_repo_name,pull_request_id=pull_request_id)}">
384 384 % if short:
385 385 !${pull_request_id}
386 386 % else:
387 387 ${_('Pull request !{}').format(pull_request_id)}
388 388 % endif
389 389 </a>
390 390 </code>
391 391 % if state not in ['created']:
392 392 <span class="tag tag-merge-state-${state} tooltip" title="Pull request state is changing">${state}</span>
393 393 % endif
394 394
395 395 % if is_wip:
396 396 <span class="tag tooltip" title="${_('Work in progress')}">wip</span>
397 397 % endif
398 398 </%def>
399 399
400 400 <%def name="pullrequest_updated_on(updated_on, pr_version=None)">
401 401 % if pr_version:
402 402 <code>v${pr_version}</code>
403 403 % endif
404 404 ${h.age_component(h.time_to_utcdatetime(updated_on))}
405 405 </%def>
406 406
407 407 <%def name="pullrequest_author(full_contact)">
408 408 ${base.gravatar_with_user(full_contact, 16, tooltip=True)}
409 409 </%def>
410 410
411 411
412 412 ## ARTIFACT RENDERERS
413 413 <%def name="repo_artifact_name(repo_name, file_uid, artifact_display_name)">
414 414 <a href="${h.route_path('repo_artifacts_get', repo_name=repo_name, uid=file_uid)}">
415 415 ${artifact_display_name or '_EMPTY_NAME_'}
416 416 </a>
417 417 </%def>
418 418
419 <%def name="repo_artifact_admin_name(file_uid, artifact_display_name)">
420 <a href="${h.route_path('admin_artifacts_show_info', uid=file_uid)}">
421 ${(artifact_display_name or '_EMPTY_NAME_')}
422 </a>
423 </%def>
424
419 425 <%def name="repo_artifact_uid(repo_name, file_uid)">
420 426 <code>${h.shorter(file_uid, size=24, prefix=True)}</code>
421 427 </%def>
422 428
423 429 <%def name="repo_artifact_sha256(artifact_sha256)">
424 430 <div class="code">${h.shorter(artifact_sha256, 12)}</div>
425 431 </%def>
426 432
427 433 <%def name="repo_artifact_actions(repo_name, file_store_id, file_uid)">
428 434 ## <div class="grid_edit">
429 435 ## <a href="#Edit" title="${_('Edit')}">${_('Edit')}</a>
430 436 ## </div>
431 437 <div class="grid_edit">
432 438 <a href="${h.route_path('repo_artifacts_info', repo_name=repo_name, uid=file_store_id)}" title="${_('Info')}">${_('Info')}</a>
433 439 </div>
434 440 % if h.HasRepoPermissionAny('repository.admin')(c.repo_name):
435 441 <div class="grid_delete">
436 442 ${h.secure_form(h.route_path('repo_artifacts_delete', repo_name=repo_name, uid=file_store_id), request=request)}
437 443 <input class="btn btn-link btn-danger" id="remove_artifact_${file_store_id}" name="remove_artifact_${file_store_id}"
438 444 onclick="submitConfirm(event, this, _gettext('Confirm to delete this artifact'), _gettext('Delete'), '${file_uid}')"
439 445 type="submit" value="${_('Delete')}"
440 446 >
441 447 ${h.end_form()}
442 448 </div>
443 449 % endif
444 450 </%def>
445 451
452
446 453 <%def name="markup_form(form_id, form_text='', help_text=None)">
447 454
448 455 <div class="markup-form">
449 456 <div class="markup-form-area">
450 457 <div class="markup-form-area-header">
451 458 <ul class="nav-links clearfix">
452 459 <li class="active">
453 460 <a href="#edit-text" tabindex="-1" id="edit-btn_${form_id}">${_('Write')}</a>
454 461 </li>
455 462 <li class="">
456 463 <a href="#preview-text" tabindex="-1" id="preview-btn_${form_id}">${_('Preview')}</a>
457 464 </li>
458 465 </ul>
459 466 </div>
460 467
461 468 <div class="markup-form-area-write" style="display: block;">
462 469 <div id="edit-container_${form_id}" style="margin-top: -1px">
463 470 <textarea id="${form_id}" name="${form_id}" class="comment-block-ta ac-input">${form_text if form_text else ''}</textarea>
464 471 </div>
465 472 <div id="preview-container_${form_id}" class="clearfix" style="display: none;">
466 473 <div id="preview-box_${form_id}" class="preview-box"></div>
467 474 </div>
468 475 </div>
469 476
470 477 <div class="markup-form-area-footer">
471 478 <div class="toolbar">
472 479 <div class="toolbar-text">
473 480 ${(_('Parsed using %s syntax') % (
474 481 ('<a href="%s">%s</a>' % (h.route_url('%s_help' % c.visual.default_renderer), c.visual.default_renderer.upper())),
475 482 )
476 483 )|n}
477 484 </div>
478 485 </div>
479 486 </div>
480 487 </div>
481 488
482 489 <div class="markup-form-footer">
483 490 % if help_text:
484 491 <span class="help-block">${help_text}</span>
485 492 % endif
486 493 </div>
487 494 </div>
488 495 <script type="text/javascript">
489 496 new MarkupForm('${form_id}');
490 497 </script>
491 498
492 499 </%def>
@@ -1,278 +1,278 b''
1 1 <%text>
2 2 <div style="display: none">
3 3
4 4 <script>
5 5 var CG = new ColorGenerator();
6 6 </script>
7 7
8 8 <script id="ejs_gravatarWithUser" type="text/template" class="ejsTemplate">
9 9
10 10 <%
11 11 if (size > 16) {
12 12 var gravatar_class = 'gravatar gravatar-large';
13 13 } else {
14 14 var gravatar_class = 'gravatar';
15 15 }
16 16
17 17 if (tooltip) {
18 18 var gravatar_class = gravatar_class + ' tooltip-hovercard';
19 19 }
20 20
21 21 var data_hovercard_alt = username;
22 22
23 23 %>
24 24
25 25 <%
26 26 if (show_disabled) {
27 27 var user_cls = 'user user-disabled';
28 28 } else {
29 29 var user_cls = 'user';
30 30 }
31 31 var data_hovercard_url = pyroutes.url('hovercard_user', {"user_id": user_id})
32 32 %>
33 33
34 34 <div class="rc-user">
35 35 <img class="<%= gravatar_class %>" height="<%= size %>" width="<%= size %>" data-hovercard-url="<%= data_hovercard_url %>" data-hovercard-alt="<%= data_hovercard_alt %>" src="<%- gravatar_url -%>">
36 36 <span class="<%= user_cls %>"> <%- user_link -%> </span>
37 37 </div>
38 38
39 39 </script>
40 40
41 41 <script id="ejs_reviewMemberEntry" type="text/template" class="ejsTemplate">
42 42 <%
43 43 if (create) {
44 44 var edit_visibility = 'visible';
45 45 } else {
46 46 var edit_visibility = 'hidden';
47 47 }
48 48
49 49 if (member.user_group && member.user_group.vote_rule) {
50 50 var reviewGroup = '<i class="icon-user-group"></i>';
51 51 var reviewGroupColor = CG.asRGB(CG.getColor(member.user_group.vote_rule));
52 52 } else {
53 53 var reviewGroup = null;
54 54 var reviewGroupColor = 'transparent';
55 55 }
56 56 var rule_show = rule_show || false;
57 57
58 58 if (rule_show) {
59 59 var rule_visibility = 'table-cell';
60 60 } else {
61 61 var rule_visibility = 'none';
62 62 }
63 63
64 64 %>
65 65
66 66 <tr id="reviewer_<%= member.user_id %>" class="reviewer_entry" tooltip="Review Group" data-reviewer-user-id="<%= member.user_id %>">
67 67
68 68 <% if (create) { %>
69 69 <td style="width: 1px"></td>
70 70 <% } else { %>
71 71 <td style="width: 20px">
72 72 <div class="tooltip presence-state" style="display: none; position: absolute; left: 2px" title="This users is currently at this page">
73 73 <i class="icon-eye" style="color: #0ac878"></i>
74 74 </div>
75 75 <% if (role === 'reviewer') { %>
76 76 <div class="reviewer_status tooltip" title="<%= review_status_label %>">
77 77 <i class="icon-circle review-status-<%= review_status %>"></i>
78 78 </div>
79 79 <% } else if (role === 'observer') { %>
80 80 <div class="tooltip" title="Observer without voting right.">
81 81 <i class="icon-circle-thin"></i>
82 82 </div>
83 83 <% } %>
84 84 </td>
85 85 <% } %>
86 86
87 87
88 88 <% if (mandatory) { %>
89 89 <td style="text-align: right;width: 10px;">
90 90 <div class="reviewer_member_mandatory tooltip" title="Mandatory reviewer">
91 91 <i class="icon-lock"></i>
92 92 </div>
93 93 </td>
94 94
95 95 <% } else { %>
96 96 <td style="text-align: right;width: 10px;">
97 97 <% if (allowed_to_update) { %>
98 98 <div class="<%=role %>_member_remove" onclick="reviewersController.removeMember(<%= member.user_id %>, true)" style="visibility: <%= edit_visibility %>;">
99 99 <i class="icon-remove" style="color: #e85e4d;"></i>
100 100 </div>
101 101 <% } %>
102 102 </td>
103 103 <% } %>
104 104
105 105 <td>
106 106 <div id="reviewer_<%= member.user_id %>_name" class="reviewer_name">
107 107 <%-
108 108 renderTemplate('gravatarWithUser', {
109 109 'size': 16,
110 110 'show_disabled': false,
111 111 'tooltip': true,
112 112 'username': member.username,
113 113 'user_id': member.user_id,
114 114 'user_link': member.user_link,
115 115 'gravatar_url': member.gravatar_link
116 116 })
117 117 %>
118 118 </div>
119 119 <% if (reviewGroup !== null) { %>
120 120 <span class="tooltip" title="Member of review group from rule: `<%= member.user_group.name %>`" style="color: <%= reviewGroupColor %>">
121 121 <%- reviewGroup %>
122 122 </span>
123 123 <% } %>
124 124 </td>
125 125
126 126 </tr>
127 127
128 128 <tr id="reviewer_<%= member.user_id %>_rules">
129 129 <td colspan="4" style="display: <%= rule_visibility %>" class="pr-user-rule-container">
130 130 <input type="hidden" name="__start__" value="reviewer:mapping">
131 131
132 132 <%if (member.user_group && member.user_group.vote_rule) { %>
133 133 <div class="reviewer_reason">
134 134
135 135 <%if (member.user_group.vote_rule == -1) {%>
136 136 - group votes required: ALL
137 137 <%} else {%>
138 138 - group votes required: <%= member.user_group.vote_rule %>
139 139 <%}%>
140 140 </div>
141 141 <%} %>
142 142
143 143 <input type="hidden" name="__start__" value="reasons:sequence">
144 144 <% for (var i = 0; i < reasons.length; i++) { %>
145 145 <% var reason = reasons[i] %>
146 146 <div class="reviewer_reason">- <%= reason %></div>
147 147 <input type="hidden" name="reason" value="<%= reason %>">
148 148 <% } %>
149 149 <input type="hidden" name="__end__" value="reasons:sequence">
150 150
151 151 <input type="hidden" name="__start__" value="rules:sequence">
152 152 <% for (var i = 0; i < member.rules.length; i++) { %>
153 153 <% var rule = member.rules[i] %>
154 154 <input type="hidden" name="rule_id" value="<%= rule %>">
155 155 <% } %>
156 156 <input type="hidden" name="__end__" value="rules:sequence">
157 157
158 158 <input id="reviewer_<%= member.user_id %>_input" type="hidden" value="<%= member.user_id %>" name="user_id" />
159 159 <input type="hidden" name="mandatory" value="<%= mandatory %>"/>
160 160 <input type="hidden" name="role" value="<%= role %>"/>
161 161
162 162 <input type="hidden" name="__end__" value="reviewer:mapping">
163 163 </td>
164 164 </tr>
165 165
166 166 </script>
167 167
168 168 <script id="ejs_commentVersion" type="text/template" class="ejsTemplate">
169 169
170 170 <%
171 171 if (size > 16) {
172 172 var gravatar_class = 'gravatar gravatar-large';
173 173 } else {
174 174 var gravatar_class = 'gravatar';
175 175 }
176 176
177 177 %>
178 178
179 179 <%
180 180 if (show_disabled) {
181 181 var user_cls = 'user user-disabled';
182 182 } else {
183 183 var user_cls = 'user';
184 184 }
185 185
186 186 %>
187 187
188 188 <div style='line-height: 20px'>
189 189 <img style="margin: -3px 0" class="<%= gravatar_class %>" height="<%= size %>" width="<%= size %>" src="<%- gravatar_url -%>">
190 190 <strong><%- user_name -%></strong>, <code>v<%- version -%></code> edited <%- timeago_component -%>
191 191 </div>
192 192
193 193 </script>
194 194
195 195
196 196 <script id="ejs_sideBarCommentHovercard" type="text/template" class="ejsTemplate">
197 197
198 198 <div>
199 199
200 200 <% if (is_todo) { %>
201 201 <% if (inline) { %>
202 202 <strong>Inline</strong> TODO (<code>#<%- comment_id -%></code>) on line: <%= line_no %>
203 203 <% if (version_info) { %>
204 204 <%= version_info %>
205 205 <% } %>
206 206 <br/>
207 207 File: <code><%- file_name -%></code>
208 208 <% } else { %>
209 209 <% if (review_status) { %>
210 210 <i class="icon-circle review-status-<%= review_status %>"></i>
211 211 <% } %>
212 212 <strong>General</strong> TODO (<code>#<%- comment_id -%></code>)
213 213 <% if (version_info) { %>
214 214 <%= version_info %>
215 215 <% } %>
216 216 <% } %>
217 217 <% } else { %>
218 218 <% if (inline) { %>
219 219 <strong>Inline</strong> comment (<code>#<%- comment_id -%></code>) on line: <%= line_no %>
220 220 <% if (version_info) { %>
221 221 <%= version_info %>
222 222 <% } %>
223 223 <br/>
224 File: <code><%- file_name -%></code>
224 File: <code><%= file_name -%></code>
225 225 <% } else { %>
226 226 <% if (review_status) { %>
227 227 <i class="icon-circle review-status-<%= review_status %>"></i>
228 228 <% } %>
229 229 <strong>General</strong> comment (<code>#<%- comment_id -%></code>)
230 230 <% if (version_info) { %>
231 231 <%= version_info %>
232 232 <% } %>
233 233 <% } %>
234 234 <% } %>
235 235 <br/>
236 236 Created:
237 237 <time class="timeago" title="<%= created_on %>" datetime="<%= datetime %>"><%= $.timeago(datetime) %></time>
238 238
239 239 <% if (is_todo) { %>
240 240 <div style="text-align: center; padding-top: 5px">
241 241 <a class="btn btn-sm" href="#resolveTodo<%- comment_id -%>" onclick="Rhodecode.comments.resolveTodo(this, '<%- comment_id -%>'); return false">
242 242 <strong>Resolve TODO</strong>
243 243 </a>
244 244 </div>
245 245 <% } %>
246 246
247 247 </div>
248 248
249 249 </script>
250 250
251 251 <script id="ejs_commentHelpHovercard" type="text/template" class="ejsTemplate">
252 252
253 253 <div>
254 254 Use <strong>@username</strong> mention syntax to send direct notification to this RhodeCode user.<br/>
255 255 Typing / starts autocomplete for certain action, e.g set review status, or comment type. <br/>
256 256 <br/>
257 257 Use <strong>Cmd/ctrl+enter</strong> to submit comment, or <strong>Shift+Cmd/ctrl+enter</strong> to submit a draft.<br/>
258 258 <br/>
259 259 <strong>Draft comments</strong> are private to the author, and trigger no notification to others.<br/>
260 260 They are permanent until deleted, or converted to regular comments.<br/>
261 261 <br/>
262 262 <br/>
263 263 </div>
264 264
265 265 </script>
266 266
267 267
268 268
269 269 ##// END OF EJS Templates
270 270 </div>
271 271
272 272
273 273 <script>
274 274 // registers the templates into global cache
275 275 registerTemplates();
276 276 </script>
277 277
278 278 </%text>
@@ -1,1052 +1,1055 b''
1 1 <%inherit file="/base/base.mako"/>
2 2 <%namespace name="base" file="/base/base.mako"/>
3 3 <%namespace name="dt" file="/data_table/_dt_elements.mako"/>
4 4 <%namespace name="sidebar" file="/base/sidebar.mako"/>
5 5
6 6
7 7 <%def name="title()">
8 8 ${_('{} Pull Request !{}').format(c.repo_name, c.pull_request.pull_request_id)}
9 9 %if c.rhodecode_name:
10 10 &middot; ${h.branding(c.rhodecode_name)}
11 11 %endif
12 12 </%def>
13 13
14 14 <%def name="breadcrumbs_links()">
15 15
16 16 </%def>
17 17
18 18 <%def name="menu_bar_nav()">
19 19 ${self.menu_items(active='repositories')}
20 20 </%def>
21 21
22 22 <%def name="menu_bar_subnav()">
23 23 ${self.repo_menu(active='showpullrequest')}
24 24 </%def>
25 25
26 26
27 27 <%def name="main()">
28 28 ## Container to gather extracted Tickets
29 29 <%
30 c.referenced_commit_issues = []
31 c.referenced_desc_issues = []
30 c.referenced_commit_issues = h.IssuesRegistry()
31 c.referenced_desc_issues = h.IssuesRegistry()
32 32 %>
33 33
34 34 <script type="text/javascript">
35 35 templateContext.pull_request_data.pull_request_id = ${c.pull_request.pull_request_id};
36 36 templateContext.pull_request_data.pull_request_version = '${request.GET.get('version', '')}';
37 37 </script>
38 38
39 39 <div class="box">
40 40
41 41 <div class="box pr-summary">
42 42
43 43 <div class="summary-details block-left">
44 44 <div id="pr-title">
45 45 % if c.pull_request.is_closed():
46 46 <span class="pr-title-closed-tag tag">${_('Closed')}</span>
47 47 % endif
48 48 <input class="pr-title-input large disabled" disabled="disabled" name="pullrequest_title" type="text" value="${c.pull_request.title}">
49 49 </div>
50 50 <div id="pr-title-edit" class="input" style="display: none;">
51 51 <input class="pr-title-input large" id="pr-title-input" name="pullrequest_title" type="text" value="${c.pull_request.title}">
52 52 </div>
53 53
54 54 <% summary = lambda n:{False:'summary-short'}.get(n) %>
55 55 <div class="pr-details-title">
56 56 <div class="pull-left">
57 57 <a href="${h.route_path('pull_requests_global', pull_request_id=c.pull_request.pull_request_id)}">${_('Pull request !{}').format(c.pull_request.pull_request_id)}</a>
58 58 ${_('Created on')}
59 59 <span class="tooltip" title="${_('Last updated on')} ${h.format_date(c.pull_request.updated_on)}">${h.format_date(c.pull_request.created_on)},</span>
60 60 <span class="pr-details-title-author-pref">${_('by')}</span>
61 61 </div>
62 62
63 63 <div class="pull-left">
64 64 ${self.gravatar_with_user(c.pull_request.author.email, 16, tooltip=True)}
65 65 </div>
66 66
67 67 %if c.allowed_to_update:
68 68 <div class="pull-right">
69 69 <div id="edit_pull_request" class="action_button pr-save" style="display: none;">${_('Update title & description')}</div>
70 70 <div id="delete_pullrequest" class="action_button pr-save ${('' if c.allowed_to_delete else 'disabled' )}" style="display: none;">
71 71 % if c.allowed_to_delete:
72 72 ${h.secure_form(h.route_path('pullrequest_delete', repo_name=c.pull_request.target_repo.repo_name, pull_request_id=c.pull_request.pull_request_id), request=request)}
73 73 <input class="btn btn-link btn-danger no-margin" id="remove_${c.pull_request.pull_request_id}" name="remove_${c.pull_request.pull_request_id}"
74 74 onclick="submitConfirm(event, this, _gettext('Confirm to delete this pull request'), _gettext('Delete'), '${'!{}'.format(c.pull_request.pull_request_id)}')"
75 75 type="submit" value="${_('Delete pull request')}">
76 76 ${h.end_form()}
77 77 % else:
78 78 <span class="tooltip" title="${_('Not allowed to delete this pull request')}">${_('Delete pull request')}</span>
79 79 % endif
80 80 </div>
81 81 <div id="open_edit_pullrequest" class="action_button">${_('Edit')}</div>
82 82 <div id="close_edit_pullrequest" class="action_button" style="display: none;">${_('Cancel')}</div>
83 83 </div>
84 84
85 85 %endif
86 86 </div>
87 87
88 88 <div id="pr-desc" class="input" title="${_('Rendered using {} renderer').format(c.renderer)}">
89 ${h.render(c.pull_request.description, renderer=c.renderer, repo_name=c.repo_name, issues_container=c.referenced_desc_issues)}
89 ${h.render(c.pull_request.description, renderer=c.renderer, repo_name=c.repo_name, issues_container_callback=c.referenced_desc_issues())}
90 90 </div>
91 91
92 92 <div id="pr-desc-edit" class="input textarea" style="display: none;">
93 93 <input id="pr-renderer-input" type="hidden" name="description_renderer" value="${c.visual.default_renderer}">
94 94 ${dt.markup_form('pr-description-input', form_text=c.pull_request.description)}
95 95 </div>
96 96
97 97 <div id="summary" class="fields pr-details-content">
98 98
99 99 ## source
100 100 <div class="field">
101 101 <div class="label-pr-detail">
102 102 <label>${_('Commit flow')}:</label>
103 103 </div>
104 104 <div class="input">
105 105 <div class="pr-commit-flow">
106 106 ## Source
107 107 %if c.pull_request.source_ref_parts.type == 'branch':
108 108 <a href="${h.route_path('repo_commits', repo_name=c.pull_request.source_repo.repo_name, _query=dict(branch=c.pull_request.source_ref_parts.name))}"><code class="pr-source-info">${c.pull_request.source_ref_parts.type}:${c.pull_request.source_ref_parts.name}</code></a>
109 109 %else:
110 110 <code class="pr-source-info">${'{}:{}'.format(c.pull_request.source_ref_parts.type, c.pull_request.source_ref_parts.name)}</code>
111 111 %endif
112 112 ${_('of')} <a href="${h.route_path('repo_summary', repo_name=c.pull_request.source_repo.repo_name)}">${c.pull_request.source_repo.repo_name}</a>
113 113 &rarr;
114 114 ## Target
115 115 %if c.pull_request.target_ref_parts.type == 'branch':
116 116 <a href="${h.route_path('repo_commits', repo_name=c.pull_request.target_repo.repo_name, _query=dict(branch=c.pull_request.target_ref_parts.name))}"><code class="pr-target-info">${c.pull_request.target_ref_parts.type}:${c.pull_request.target_ref_parts.name}</code></a>
117 117 %else:
118 118 <code class="pr-target-info">${'{}:{}'.format(c.pull_request.target_ref_parts.type, c.pull_request.target_ref_parts.name)}</code>
119 119 %endif
120 120
121 121 ${_('of')} <a href="${h.route_path('repo_summary', repo_name=c.pull_request.target_repo.repo_name)}">${c.pull_request.target_repo.repo_name}</a>
122 122
123 123 <a class="source-details-action" href="#expand-source-details" onclick="return toggleElement(this, '.source-details')" data-toggle-on='<i class="icon-angle-down">more details</i>' data-toggle-off='<i class="icon-angle-up">less details</i>'>
124 124 <i class="icon-angle-down">more details</i>
125 125 </a>
126 126
127 127 </div>
128 128
129 129 <div class="source-details" style="display: none">
130 130
131 131 <ul>
132 132
133 133 ## common ancestor
134 134 <li>
135 135 ${_('Common ancestor')}:
136 136 % if c.ancestor_commit:
137 137 <a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=c.ancestor_commit.raw_id)}">${h.show_id(c.ancestor_commit)}</a>
138 138 % else:
139 139 ${_('not available')}
140 140 % endif
141 141 </li>
142 142
143 143 ## pull url
144 144 <li>
145 145 %if h.is_hg(c.pull_request.source_repo):
146 146 <% clone_url = 'hg pull -r {} {}'.format(h.short_id(c.source_ref), c.pull_request.source_repo.clone_url()) %>
147 147 %elif h.is_git(c.pull_request.source_repo):
148 148 <% clone_url = 'git pull {} {}'.format(c.pull_request.source_repo.clone_url(), c.pull_request.source_ref_parts.name) %>
149 149 %endif
150 150
151 151 <span>${_('Pull changes from source')}</span>: <input type="text" class="input-monospace pr-pullinfo" value="${clone_url}" readonly="readonly">
152 152 <i class="tooltip icon-clipboard clipboard-action pull-right pr-pullinfo-copy" data-clipboard-text="${clone_url}" title="${_('Copy the pull url')}"></i>
153 153 </li>
154 154
155 155 ## Shadow repo
156 156 <li>
157 157 % if not c.pull_request.is_closed() and c.pull_request.shadow_merge_ref:
158 158 %if h.is_hg(c.pull_request.target_repo):
159 159 <% clone_url = 'hg clone --update {} {} pull-request-{}'.format(c.pull_request.shadow_merge_ref.name, c.shadow_clone_url, c.pull_request.pull_request_id) %>
160 160 %elif h.is_git(c.pull_request.target_repo):
161 161 <% clone_url = 'git clone --branch {} {} pull-request-{}'.format(c.pull_request.shadow_merge_ref.name, c.shadow_clone_url, c.pull_request.pull_request_id) %>
162 162 %endif
163 163
164 164 <span class="tooltip" title="${_('Clone repository in its merged state using shadow repository')}">${_('Clone from shadow repository')}</span>: <input type="text" class="input-monospace pr-mergeinfo" value="${clone_url}" readonly="readonly">
165 165 <i class="tooltip icon-clipboard clipboard-action pull-right pr-mergeinfo-copy" data-clipboard-text="${clone_url}" title="${_('Copy the clone url')}"></i>
166 166
167 167 % else:
168 168 <div class="">
169 169 ${_('Shadow repository data not available')}.
170 170 </div>
171 171 % endif
172 172 </li>
173 173
174 174 </ul>
175 175
176 176 </div>
177 177
178 178 </div>
179 179
180 180 </div>
181 181
182 182 ## versions
183 183 <div class="field">
184 184 <div class="label-pr-detail">
185 185 <label>${_('Versions')}:</label>
186 186 </div>
187 187
188 188 <% outdated_comm_count_ver = len(c.inline_versions[None]['outdated']) %>
189 189 <% general_outdated_comm_count_ver = len(c.comment_versions[None]['outdated']) %>
190 190
191 191 <div class="pr-versions">
192 192 % if c.show_version_changes:
193 193 <% outdated_comm_count_ver = len(c.inline_versions[c.at_version_num]['outdated']) %>
194 194 <% general_outdated_comm_count_ver = len(c.comment_versions[c.at_version_num]['outdated']) %>
195 195 ${_ungettext('{} version available for this pull request, ', '{} versions available for this pull request, ', len(c.versions)).format(len(c.versions))}
196 196 <a id="show-pr-versions" onclick="return versionController.toggleVersionView(this)" href="#show-pr-versions"
197 197 data-toggle-on="${_('show versions')}."
198 198 data-toggle-off="${_('hide versions')}.">
199 199 ${_('show versions')}.
200 200 </a>
201 201 <table>
202 202 ## SHOW ALL VERSIONS OF PR
203 203 <% ver_pr = None %>
204 204
205 205 % for data in reversed(list(enumerate(c.versions, 1))):
206 206 <% ver_pos = data[0] %>
207 207 <% ver = data[1] %>
208 208 <% ver_pr = ver.pull_request_version_id %>
209 209 <% display_row = '' if c.at_version and (c.at_version_num == ver_pr or c.from_version_num == ver_pr) else 'none' %>
210 210
211 211 <tr class="version-pr" style="display: ${display_row}">
212 212 <td>
213 213 <code>
214 214 <a href="${request.current_route_path(_query=dict(version=ver_pr or 'latest'))}">v${ver_pos}</a>
215 215 </code>
216 216 </td>
217 217 <td>
218 218 <input ${('checked="checked"' if c.from_version_index == ver_pr else '')} class="compare-radio-button" type="radio" name="ver_source" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
219 219 <input ${('checked="checked"' if c.at_version_num == ver_pr else '')} class="compare-radio-button" type="radio" name="ver_target" value="${ver_pr or 'latest'}" data-ver-pos="${ver_pos}"/>
220 220 </td>
221 221 <td>
222 222 <% review_status = c.review_versions[ver_pr].status if ver_pr in c.review_versions else 'not_reviewed' %>
223 223 <i class="tooltip icon-circle review-status-${review_status}" title="${_('Your review status at this version')}"></i>
224 224
225 225 </td>
226 226 <td>
227 227 % if c.at_version_num != ver_pr:
228 228 <i class="tooltip icon-comment" title="${_('Comments from pull request version v{0}').format(ver_pos)}"></i>
229 229 <code>
230 230 General:${len(c.comment_versions[ver_pr]['at'])} / Inline:${len(c.inline_versions[ver_pr]['at'])}
231 231 </code>
232 232 % endif
233 233 </td>
234 234 <td>
235 235 ##<code>${ver.source_ref_parts.commit_id[:6]}</code>
236 236 </td>
237 237 <td>
238 238 <code>${h.age_component(ver.updated_on, time_is_local=True, tooltip=False)}</code>
239 239 </td>
240 240 </tr>
241 241 % endfor
242 242
243 243 <tr>
244 244 <td colspan="6">
245 245 <button id="show-version-diff" onclick="return versionController.showVersionDiff()" class="btn btn-sm" style="display: none"
246 246 data-label-text-locked="${_('select versions to show changes')}"
247 247 data-label-text-diff="${_('show changes between versions')}"
248 248 data-label-text-show="${_('show pull request for this version')}"
249 249 >
250 250 ${_('select versions to show changes')}
251 251 </button>
252 252 </td>
253 253 </tr>
254 254 </table>
255 255 % else:
256 256 <div>
257 257 ${_('Pull request versions not available')}.
258 258 </div>
259 259 % endif
260 260 </div>
261 261 </div>
262 262
263 263 </div>
264 264
265 265 </div>
266 266
267 267
268 268 </div>
269 269
270 270 </div>
271 271
272 272 <div class="box">
273 273
274 274 % if c.state_progressing:
275 275
276 276 <h2 style="text-align: center">
277 277 ${_('Cannot show diff when pull request state is changing. Current progress state')}: <span class="tag tag-merge-state-${c.pull_request.state}">${c.pull_request.state}</span>
278 278
279 279 % if c.is_super_admin:
280 280 <br/>
281 281 If you think this is an error try <a href="${h.current_route_path(request, force_state='created')}">forced state reset</a> to <span class="tag tag-merge-state-created">created</span> state.
282 282 % endif
283 283 </h2>
284 284
285 285 % else:
286 286
287 287 ## Diffs rendered here
288 288 <div class="table" >
289 289 <div id="changeset_compare_view_content">
290 290 ##CS
291 291 % if c.missing_requirements:
292 292 <div class="box">
293 293 <div class="alert alert-warning">
294 294 <div>
295 295 <strong>${_('Missing requirements:')}</strong>
296 296 ${_('These commits cannot be displayed, because this repository uses the Mercurial largefiles extension, which was not enabled.')}
297 297 </div>
298 298 </div>
299 299 </div>
300 300 % elif c.missing_commits:
301 301 <div class="box">
302 302 <div class="alert alert-warning">
303 303 <div>
304 304 <strong>${_('Missing commits')}:</strong>
305 305 ${_('This pull request cannot be displayed, because one or more commits no longer exist in the source repository.')}<br/>
306 306 ${_('Please update this pull request, push the commits back into the source repository, or consider closing this pull request.')}<br/>
307 307 ${_('Consider doing a `force update commits` in case you think this is an error.')}
308 308 </div>
309 309 </div>
310 310 </div>
311 311 % elif c.pr_merge_source_commit.changed and not c.pull_request.is_closed():
312 312 <div class="box">
313 313 <div class="alert alert-info">
314 314 <div>
315 315 <strong>${_('There are new changes for `{}:{}` in source repository, please consider updating this pull request.').format(c.pr_merge_source_commit.ref_spec.type, c.pr_merge_source_commit.ref_spec.name)}</strong>
316 316 </div>
317 317 </div>
318 318 </div>
319 319 % endif
320 320
321 321 <div class="compare_view_commits_title">
322 322 % if not c.compare_mode:
323 323
324 324 % if c.at_version_index:
325 325 <h4>
326 326 ${_('Showing changes at v{}, commenting is disabled.').format(c.at_version_index)}
327 327 </h4>
328 328 % endif
329 329
330 330 <div class="pull-left">
331 331 <div class="btn-group">
332 332 <a class="${('collapsed' if c.collapse_all_commits else '')}" href="#expand-commits" onclick="toggleCommitExpand(this); return false" data-toggle-commits-cnt=${len(c.commit_ranges)} >
333 333 % if c.collapse_all_commits:
334 334 <i class="icon-plus-squared-alt icon-no-margin"></i>
335 335 ${_ungettext('Expand {} commit', 'Expand {} commits', len(c.commit_ranges)).format(len(c.commit_ranges))}
336 336 % else:
337 337 <i class="icon-minus-squared-alt icon-no-margin"></i>
338 338 ${_ungettext('Collapse {} commit', 'Collapse {} commits', len(c.commit_ranges)).format(len(c.commit_ranges))}
339 339 % endif
340 340 </a>
341 341 </div>
342 342 </div>
343 343
344 344 <div class="pull-right">
345 345 % if c.allowed_to_update and not c.pull_request.is_closed():
346 346
347 347 <div class="btn-group btn-group-actions">
348 348 <a id="update_commits" class="btn btn-primary no-margin" onclick="updateController.updateCommits(this); return false">
349 349 ${_('Update commits')}
350 350 </a>
351 351
352 352 <a id="update_commits_switcher" class="tooltip btn btn-primary btn-more-option" data-toggle="dropdown" aria-pressed="false" role="button" title="${_('more update options')}">
353 353 <i class="icon-down"></i>
354 354 </a>
355 355
356 356 <div class="btn-action-switcher-container right-align" id="update-commits-switcher">
357 357 <ul class="btn-action-switcher" role="menu" style="min-width: 300px;">
358 358 <li>
359 359 <a href="#forceUpdate" onclick="updateController.forceUpdateCommits(this); return false">
360 360 ${_('Force update commits')}
361 361 </a>
362 362 <div class="action-help-block">
363 363 ${_('Update commits and force refresh this pull request.')}
364 364 </div>
365 365 </li>
366 366 </ul>
367 367 </div>
368 368 </div>
369 369
370 370 % else:
371 371 <a class="tooltip btn disabled pull-right" disabled="disabled" title="${_('Update is disabled for current view')}">${_('Update commits')}</a>
372 372 % endif
373 373
374 374 </div>
375 375 % endif
376 376 </div>
377 377
378 378 % if not c.missing_commits:
379 379 ## COMPARE RANGE DIFF MODE
380 380 % if c.compare_mode:
381 381 % if c.at_version:
382 382 <h4>
383 383 ${_('Commits and changes between v{ver_from} and {ver_to} of this pull request, commenting is disabled').format(ver_from=c.from_version_index, ver_to=c.at_version_index if c.at_version_index else 'latest')}:
384 384 </h4>
385 385
386 386 <div class="subtitle-compare">
387 387 ${_('commits added: {}, removed: {}').format(len(c.commit_changes_summary.added), len(c.commit_changes_summary.removed))}
388 388 </div>
389 389
390 390 <div class="container">
391 391 <table class="rctable compare_view_commits">
392 392 <tr>
393 393 <th></th>
394 394 <th>${_('Time')}</th>
395 395 <th>${_('Author')}</th>
396 396 <th>${_('Commit')}</th>
397 397 <th></th>
398 398 <th>${_('Description')}</th>
399 399 </tr>
400 400
401 401 % for c_type, commit in c.commit_changes:
402 402 % if c_type in ['a', 'r']:
403 403 <%
404 404 if c_type == 'a':
405 405 cc_title = _('Commit added in displayed changes')
406 406 elif c_type == 'r':
407 407 cc_title = _('Commit removed in displayed changes')
408 408 else:
409 409 cc_title = ''
410 410 %>
411 411 <tr id="row-${commit.raw_id}" commit_id="${commit.raw_id}" class="compare_select">
412 412 <td>
413 413 <div class="commit-change-indicator color-${c_type}-border">
414 414 <div class="commit-change-content color-${c_type} tooltip" title="${h.tooltip(cc_title)}">
415 415 ${c_type.upper()}
416 416 </div>
417 417 </div>
418 418 </td>
419 419 <td class="td-time">
420 420 ${h.age_component(commit.date)}
421 421 </td>
422 422 <td class="td-user">
423 423 ${base.gravatar_with_user(commit.author, 16, tooltip=True)}
424 424 </td>
425 425 <td class="td-hash">
426 426 <code>
427 427 <a href="${h.route_path('repo_commit', repo_name=c.target_repo.repo_name, commit_id=commit.raw_id)}">
428 428 r${commit.idx}:${h.short_id(commit.raw_id)}
429 429 </a>
430 430 ${h.hidden('revisions', commit.raw_id)}
431 431 </code>
432 432 </td>
433 433 <td class="td-message expand_commit" data-commit-id="${commit.raw_id}" title="${_( 'Expand commit message')}" onclick="commitsController.expandCommit(this); return false">
434 434 <i class="icon-expand-linked"></i>
435 435 </td>
436 436 <td class="mid td-description">
437 437 <div class="log-container truncate-wrap">
438 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">${h.urlify_commit_message(commit.message, c.repo_name, issues_container=c.referenced_commit_issues)}</div>
438 <div class="message truncate" id="c-${commit.raw_id}" data-message-raw="${commit.message}">${h.urlify_commit_message(commit.message, c.repo_name, issues_container_callback=c.referenced_commit_issues(commit.serialize()))}</div>
439 439 </div>
440 440 </td>
441 441 </tr>
442 442 % endif
443 443 % endfor
444 444 </table>
445 445 </div>
446 446
447 447 % endif
448 448
449 449 ## Regular DIFF
450 450 % else:
451 451 <%include file="/compare/compare_commits.mako" />
452 452 % endif
453 453
454 454 <div class="cs_files">
455 455 <%namespace name="cbdiffs" file="/codeblocks/diffs.mako"/>
456 456
457 457 <%
458 458 pr_menu_data = {
459 459 'outdated_comm_count_ver': outdated_comm_count_ver,
460 460 'pull_request': c.pull_request
461 461 }
462 462 %>
463 463
464 464 ${cbdiffs.render_diffset_menu(c.diffset, range_diff_on=c.range_diff_on, pull_request_menu=pr_menu_data)}
465 465
466 466 % if c.range_diff_on:
467 467 % for commit in c.commit_ranges:
468 468 ${cbdiffs.render_diffset(
469 469 c.changes[commit.raw_id],
470 470 commit=commit, use_comments=True,
471 471 collapse_when_files_over=5,
472 472 disable_new_comments=True,
473 473 deleted_files_comments=c.deleted_files_comments,
474 474 inline_comments=c.inline_comments,
475 475 pull_request_menu=pr_menu_data, show_todos=False)}
476 476 % endfor
477 477 % else:
478 478 ${cbdiffs.render_diffset(
479 479 c.diffset, use_comments=True,
480 480 collapse_when_files_over=30,
481 481 disable_new_comments=not c.allowed_to_comment,
482 482 deleted_files_comments=c.deleted_files_comments,
483 483 inline_comments=c.inline_comments,
484 484 pull_request_menu=pr_menu_data, show_todos=False)}
485 485 % endif
486 486
487 487 </div>
488 488 % else:
489 489 ## skipping commits we need to clear the view for missing commits
490 490 <div style="clear:both;"></div>
491 491 % endif
492 492
493 493 </div>
494 494 </div>
495 495
496 496 ## template for inline comment form
497 497 <%namespace name="comment" file="/changeset/changeset_file_comment.mako"/>
498 498
499 499 ## comments heading with count
500 500 <div class="comments-heading">
501 501 <i class="icon-comment"></i>
502 502 ${_('General Comments')} ${len(c.comments)}
503 503 </div>
504 504
505 505 ## render general comments
506 506 <div id="comment-tr-show">
507 507 % if general_outdated_comm_count_ver:
508 508 <div class="info-box">
509 509 % if general_outdated_comm_count_ver == 1:
510 510 ${_('there is {num} general comment from older versions').format(num=general_outdated_comm_count_ver)},
511 511 <a href="#show-hidden-comments" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show it')}</a>
512 512 % else:
513 513 ${_('there are {num} general comments from older versions').format(num=general_outdated_comm_count_ver)},
514 514 <a href="#show-hidden-comments" onclick="$('.comment-general.comment-outdated').show(); $(this).parent().hide(); return false;">${_('show them')}</a>
515 515 % endif
516 516 </div>
517 517 % endif
518 518 </div>
519 519
520 520 ${comment.generate_comments(c.comments, include_pull_request=True, is_pull_request=True)}
521 521
522 522 % if not c.pull_request.is_closed():
523 523 ## main comment form and it status
524 524 ${comment.comments(h.route_path('pullrequest_comment_create', repo_name=c.repo_name,
525 525 pull_request_id=c.pull_request.pull_request_id),
526 526 c.pull_request_review_status,
527 527 is_pull_request=True, change_status=c.allowed_to_change_status)}
528 528
529 529 ## merge status, and merge action
530 530 <div class="pull-request-merge">
531 531 <%include file="/pullrequests/pullrequest_merge_checks.mako"/>
532 532 </div>
533 533
534 534 %endif
535 535
536 536 % endif
537 537 </div>
538 538
539 539
540 540 ### NAV SIDEBAR
541 541 <aside class="right-sidebar right-sidebar-expanded" id="pr-nav-sticky" style="display: none">
542 542 <div class="sidenav navbar__inner" >
543 543 ## TOGGLE
544 544 <div class="sidebar-toggle" onclick="toggleSidebar(); return false">
545 545 <a href="#toggleSidebar" class="grey-link-action">
546 546
547 547 </a>
548 548 </div>
549 549
550 550 ## CONTENT
551 551 <div class="sidebar-content">
552 552
553 553 ## Drafts
554 554 % if c.rhodecode_edition_id == 'EE':
555 555 <div id="draftsTable" class="sidebar-element clear-both" style="display: ${'block' if c.draft_comments else 'none'}">
556 556 <div class="tooltip right-sidebar-collapsed-state" style="display: none;" onclick="toggleSidebar(); return false" title="${_('Drafts')}">
557 557 <i class="icon-comment icon-draft"></i>
558 558 <span id="drafts-count">${len(c.draft_comments)}</span>
559 559 </div>
560 560
561 561 <div class="right-sidebar-expanded-state pr-details-title">
562 562 <span style="padding-left: 2px">
563 563 <input name="select_all_drafts" type="checkbox" onclick="$('[name=submit_draft]').prop('checked', !$('[name=submit_draft]').prop('checked'))">
564 564 </span>
565 565 <span class="sidebar-heading noselect" onclick="refreshDraftComments(); return false">
566 566 <i class="icon-comment icon-draft"></i>
567 567 ${_('Drafts')}
568 568 </span>
569 569 <span class="block-right action_button last-item" onclick="submitDrafts(event)">${_('Submit')}</span>
570 570 </div>
571 571
572 572 <div id="drafts" class="right-sidebar-expanded-state pr-details-content reviewers">
573 573 % if c.draft_comments:
574 574 ${sidebar.comments_table(c.draft_comments, len(c.draft_comments), draft_comments=True)}
575 575 % else:
576 576 <table class="drafts-content-table">
577 577 <tr>
578 578 <td>
579 579 ${_('No TODOs yet')}
580 580 </td>
581 581 </tr>
582 582 </table>
583 583 % endif
584 584 </div>
585 585
586 586 </div>
587 587 % endif
588 588
589 589 ## RULES SUMMARY/RULES
590 590 <div class="sidebar-element clear-both">
591 591 <% vote_title = _ungettext(
592 592 'Status calculated based on votes from {} reviewer',
593 593 'Status calculated based on votes from {} reviewers', c.reviewers_count).format(c.reviewers_count)
594 594 %>
595 595
596 596 <div class="tooltip right-sidebar-collapsed-state" style="display: none" onclick="toggleSidebar(); return false" title="${vote_title}">
597 597 <i class="icon-circle review-status-${c.pull_request_review_status}"></i>
598 598 ${c.reviewers_count}
599 599 </div>
600 600
601 601 ## REVIEWERS
602 602 <div class="right-sidebar-expanded-state pr-details-title">
603 603 <span class="tooltip sidebar-heading" title="${vote_title}">
604 604 <i class="icon-circle review-status-${c.pull_request_review_status}"></i>
605 605 ${_('Reviewers')}
606 606 </span>
607 607
608 608 %if c.allowed_to_update:
609 609 <span id="open_edit_reviewers" class="block-right action_button last-item">${_('Edit')}</span>
610 610 <span id="close_edit_reviewers" class="block-right action_button last-item" style="display: none;">${_('Close')}</span>
611 611 %else:
612 612 <span id="open_edit_reviewers" class="block-right action_button last-item">${_('Show rules')}</span>
613 613 <span id="close_edit_reviewers" class="block-right action_button last-item" style="display: none;">${_('Close')}</span>
614 614 %endif
615 615 </div>
616 616
617 617 <div id="reviewers" class="right-sidebar-expanded-state pr-details-content reviewers">
618 618
619 619 <div id="review_rules" style="display: none" class="">
620 620
621 621 <strong>${_('Reviewer rules')}</strong>
622 622 <div class="pr-reviewer-rules">
623 623 ## review rules will be appended here, by default reviewers logic
624 624 </div>
625 625 <input id="review_data" type="hidden" name="review_data" value="">
626 626 </div>
627 627
628 628 ## members redering block
629 629 <input type="hidden" name="__start__" value="review_members:sequence">
630 630
631 631 <table id="review_members" class="group_members">
632 632 ## This content is loaded via JS and ReviewersPanel
633 633 </table>
634 634
635 635 <input type="hidden" name="__end__" value="review_members:sequence">
636 636 ## end members redering block
637 637
638 638 %if not c.pull_request.is_closed():
639 639 <div id="add_reviewer" class="ac" style="display: none;">
640 640 %if c.allowed_to_update:
641 641 % if not c.forbid_adding_reviewers:
642 642 <div id="add_reviewer_input" class="reviewer_ac" style="width: 240px">
643 643 <input class="ac-input" id="user" name="user" placeholder="${_('Add reviewer or reviewer group')}" type="text" autocomplete="off">
644 644 <div id="reviewers_container"></div>
645 645 </div>
646 646 % endif
647 647 <div class="pull-right" style="margin-bottom: 15px">
648 648 <button data-role="reviewer" id="update_reviewers" class="btn btn-sm no-margin">${_('Save Changes')}</button>
649 649 </div>
650 650 %endif
651 651 </div>
652 652 %endif
653 653 </div>
654 654 </div>
655 655
656 656 ## OBSERVERS
657 657 % if c.rhodecode_edition_id == 'EE':
658 658 <div class="sidebar-element clear-both">
659 659 <% vote_title = _ungettext(
660 660 '{} observer without voting right.',
661 661 '{} observers without voting right.', c.observers_count).format(c.observers_count)
662 662 %>
663 663
664 664 <div class="tooltip right-sidebar-collapsed-state" style="display: none" onclick="toggleSidebar(); return false" title="${vote_title}">
665 665 <i class="icon-circle-thin"></i>
666 666 ${c.observers_count}
667 667 </div>
668 668
669 669 <div class="right-sidebar-expanded-state pr-details-title">
670 670 <span class="tooltip sidebar-heading" title="${vote_title}">
671 671 <i class="icon-circle-thin"></i>
672 672 ${_('Observers')}
673 673 </span>
674 674 %if c.allowed_to_update:
675 675 <span id="open_edit_observers" class="block-right action_button last-item">${_('Edit')}</span>
676 676 <span id="close_edit_observers" class="block-right action_button last-item" style="display: none;">${_('Close')}</span>
677 677 %endif
678 678 </div>
679 679
680 680 <div id="observers" class="right-sidebar-expanded-state pr-details-content reviewers">
681 681 ## members redering block
682 682 <input type="hidden" name="__start__" value="observer_members:sequence">
683 683
684 684 <table id="observer_members" class="group_members">
685 685 ## This content is loaded via JS and ReviewersPanel
686 686 </table>
687 687
688 688 <input type="hidden" name="__end__" value="observer_members:sequence">
689 689 ## end members redering block
690 690
691 691 %if not c.pull_request.is_closed():
692 692 <div id="add_observer" class="ac" style="display: none;">
693 693 %if c.allowed_to_update:
694 694 % if not c.forbid_adding_reviewers or 1:
695 695 <div id="add_reviewer_input" class="reviewer_ac" style="width: 240px" >
696 696 <input class="ac-input" id="observer" name="observer" placeholder="${_('Add observer or observer group')}" type="text" autocomplete="off">
697 697 <div id="observers_container"></div>
698 698 </div>
699 699 % endif
700 700 <div class="pull-right" style="margin-bottom: 15px">
701 701 <button data-role="observer" id="update_observers" class="btn btn-sm no-margin">${_('Save Changes')}</button>
702 702 </div>
703 703 %endif
704 704 </div>
705 705 %endif
706 706 </div>
707 707 </div>
708 708 % endif
709 709
710 710 ## TODOs
711 711 <div id="todosTable" class="sidebar-element clear-both">
712 712 <div class="tooltip right-sidebar-collapsed-state" style="display: none" onclick="toggleSidebar(); return false" title="TODOs">
713 713 <i class="icon-flag-filled"></i>
714 714 <span id="todos-count">${len(c.unresolved_comments)}</span>
715 715 </div>
716 716
717 717 <div class="right-sidebar-expanded-state pr-details-title">
718 718 ## Only show unresolved, that is only what matters
719 719 <span class="sidebar-heading noselect" onclick="refreshTODOs(); return false">
720 720 <i class="icon-flag-filled"></i>
721 721 TODOs
722 722 </span>
723 723
724 724 % if not c.at_version:
725 725 % if c.resolved_comments:
726 726 <span class="block-right action_button last-item noselect" onclick="$('.unresolved-todo-text').toggle(); return toggleElement(this, '.resolved-todo');" data-toggle-on="Show resolved" data-toggle-off="Hide resolved">Show resolved</span>
727 727 % else:
728 728 <span class="block-right last-item noselect">Show resolved</span>
729 729 % endif
730 730 % endif
731 731 </div>
732 732
733 733 <div class="right-sidebar-expanded-state pr-details-content">
734 734
735 735 % if c.at_version:
736 736 <table>
737 737 <tr>
738 738 <td class="unresolved-todo-text">${_('TODOs unavailable when browsing versions')}.</td>
739 739 </tr>
740 740 </table>
741 741 % else:
742 742 % if c.unresolved_comments + c.resolved_comments:
743 743 ${sidebar.comments_table(c.unresolved_comments + c.resolved_comments, len(c.unresolved_comments), todo_comments=True)}
744 744 % else:
745 745 <table class="todos-content-table">
746 746 <tr>
747 747 <td>
748 748 ${_('No TODOs yet')}
749 749 </td>
750 750 </tr>
751 751 </table>
752 752 % endif
753 753 % endif
754 754 </div>
755 755 </div>
756 756
757 757 ## COMMENTS
758 758 <div id="commentsTable" class="sidebar-element clear-both">
759 759 <div class="tooltip right-sidebar-collapsed-state" style="display: none" onclick="toggleSidebar(); return false" title="${_('Comments')}">
760 760 <i class="icon-comment" style="color: #949494"></i>
761 761 <span id="comments-count">${len(c.inline_comments_flat+c.comments)}</span>
762 762 <span class="display-none" id="general-comments-count">${len(c.comments)}</span>
763 763 <span class="display-none" id="inline-comments-count">${len(c.inline_comments_flat)}</span>
764 764 </div>
765 765
766 766 <div class="right-sidebar-expanded-state pr-details-title">
767 767 <span class="sidebar-heading noselect" onclick="refreshComments(); return false">
768 768 <i class="icon-comment" style="color: #949494"></i>
769 769 ${_('Comments')}
770 770
771 771 ## % if outdated_comm_count_ver:
772 772 ## <a href="#" onclick="showOutdated(); Rhodecode.comments.nextOutdatedComment(); return false;">
773 773 ## (${_("{} Outdated").format(outdated_comm_count_ver)})
774 774 ## </a>
775 775 ## <a href="#" class="showOutdatedComments" onclick="showOutdated(this); return false;"> | ${_('show outdated')}</a>
776 776 ## <a href="#" class="hideOutdatedComments" style="display: none" onclick="hideOutdated(this); return false;"> | ${_('hide outdated')}</a>
777 777
778 778 ## % else:
779 779 ## (${_("{} Outdated").format(outdated_comm_count_ver)})
780 780 ## % endif
781 781
782 782 </span>
783 783
784 784 % if outdated_comm_count_ver:
785 785 <span class="block-right action_button last-item noselect" onclick="return toggleElement(this, '.hidden-comment');" data-toggle-on="Show outdated" data-toggle-off="Hide outdated">Show outdated</span>
786 786 % else:
787 787 <span class="block-right last-item noselect">Show hidden</span>
788 788 % endif
789 789
790 790 </div>
791 791
792 792 <div class="right-sidebar-expanded-state pr-details-content">
793 793 % if c.inline_comments_flat + c.comments:
794 794 ${sidebar.comments_table(c.inline_comments_flat + c.comments, len(c.inline_comments_flat+c.comments))}
795 795 % else:
796 796 <table class="comments-content-table">
797 797 <tr>
798 798 <td>
799 799 ${_('No Comments yet')}
800 800 </td>
801 801 </tr>
802 802 </table>
803 803 % endif
804 804 </div>
805 805
806 806 </div>
807 807
808 808 ## Referenced Tickets
809 809 <div class="sidebar-element clear-both">
810 810 <div class="tooltip right-sidebar-collapsed-state" style="display: none" onclick="toggleSidebar(); return false" title="${_('Referenced Tickets')}">
811 811 <i class="icon-info-circled"></i>
812 ${(len(c.referenced_desc_issues) + len(c.referenced_commit_issues))}
812 ${(c.referenced_desc_issues.issues_unique_count + c.referenced_commit_issues.issues_unique_count)}
813 813 </div>
814 814
815 815 <div class="right-sidebar-expanded-state pr-details-title">
816 816 <span class="sidebar-heading">
817 817 <i class="icon-info-circled"></i>
818 818 ${_('Referenced Tickets')}
819 819 </span>
820 820 </div>
821 821 <div class="right-sidebar-expanded-state pr-details-content">
822 822 <table>
823 823
824 824 <tr><td><code>${_('In pull request description')}:</code></td></tr>
825 % if c.referenced_desc_issues:
826 % for ticket_dict in sorted(c.referenced_desc_issues):
825 % if c.referenced_desc_issues.issues:
826
827 % for ticket_id, ticket_dict in c.referenced_desc_issues.unique_issues.items():
827 828 <tr>
828 829 <td>
829 <a href="${ticket_dict.get('url')}">
830 ${ticket_dict.get('id')}
830 <a href="${ticket_dict[0].get('url')}">
831 ${ticket_id}
831 832 </a>
832 833 </td>
833 834 </tr>
835
834 836 % endfor
835 837 % else:
836 838 <tr>
837 839 <td>
838 840 ${_('No Ticket data found.')}
839 841 </td>
840 842 </tr>
841 843 % endif
842 844
843 845 <tr><td style="padding-top: 10px"><code>${_('In commit messages')}:</code></td></tr>
844 % if c.referenced_commit_issues:
845 % for ticket_dict in sorted(c.referenced_commit_issues):
846 % if c.referenced_commit_issues.issues:
847 % for ticket_id, ticket_dict in c.referenced_commit_issues.unique_issues.items():
846 848 <tr>
847 849 <td>
848 <a href="${ticket_dict.get('url')}">
849 ${ticket_dict.get('id')}
850 <a href="${ticket_dict[0].get('url')}">
851 ${ticket_id}
850 852 </a>
853 - ${_ungettext('in %s commit', 'in %s commits', len(ticket_dict)) % (len(ticket_dict))}
851 854 </td>
852 855 </tr>
853 856 % endfor
854 857 % else:
855 858 <tr>
856 859 <td>
857 860 ${_('No Ticket data found.')}
858 861 </td>
859 862 </tr>
860 863 % endif
861 864 </table>
862 865
863 866 </div>
864 867 </div>
865 868
866 869 </div>
867 870
868 871 </div>
869 872 </aside>
870 873
871 874 ## This JS needs to be at the end
872 875 <script type="text/javascript">
873 876
874 877 versionController = new VersionController();
875 878 versionController.init();
876 879
877 880 reviewersController = new ReviewersController();
878 881 commitsController = new CommitsController();
879 882 commentsController = new CommentsController();
880 883
881 884 updateController = new UpdatePrController();
882 885
883 886 window.reviewerRulesData = ${c.pull_request_default_reviewers_data_json | n};
884 887 window.setReviewersData = ${c.pull_request_set_reviewers_data_json | n};
885 888 window.setObserversData = ${c.pull_request_set_observers_data_json | n};
886 889
887 890 (function () {
888 891 "use strict";
889 892
890 893 // custom code mirror
891 894 var codeMirrorInstance = $('#pr-description-input').get(0).MarkupForm.cm;
892 895
893 896 PRDetails.init();
894 897 ReviewersPanel.init(reviewersController, reviewerRulesData, setReviewersData);
895 898 ObserversPanel.init(reviewersController, reviewerRulesData, setObserversData);
896 899
897 900 window.showOutdated = function (self) {
898 901 $('.comment-inline.comment-outdated').show();
899 902 $('.filediff-outdated').show();
900 903 $('.showOutdatedComments').hide();
901 904 $('.hideOutdatedComments').show();
902 905 };
903 906
904 907 window.hideOutdated = function (self) {
905 908 $('.comment-inline.comment-outdated').hide();
906 909 $('.filediff-outdated').hide();
907 910 $('.hideOutdatedComments').hide();
908 911 $('.showOutdatedComments').show();
909 912 };
910 913
911 914 window.refreshMergeChecks = function () {
912 915 var loadUrl = "${request.current_route_path(_query=dict(merge_checks=1))}";
913 916 $('.pull-request-merge').css('opacity', 0.3);
914 917 $('.action-buttons-extra').css('opacity', 0.3);
915 918
916 919 $('.pull-request-merge').load(
917 920 loadUrl, function () {
918 921 $('.pull-request-merge').css('opacity', 1);
919 922
920 923 $('.action-buttons-extra').css('opacity', 1);
921 924 }
922 925 );
923 926 };
924 927
925 928 window.submitDrafts = function (event) {
926 929 var target = $(event.currentTarget);
927 930 var callback = function (result) {
928 931 target.removeAttr('onclick').html('saving...');
929 932 }
930 933 var draftIds = [];
931 934 $.each($('[name=submit_draft]:checked'), function (idx, val) {
932 935 draftIds.push(parseInt($(val).val()));
933 936 })
934 937 if (draftIds.length > 0) {
935 938 Rhodecode.comments.finalizeDrafts(draftIds, callback);
936 939 }
937 940 else {
938 941
939 942 }
940 943 }
941 944
942 945 window.closePullRequest = function (status) {
943 946 if (!confirm(_gettext('Are you sure to close this pull request without merging?'))) {
944 947 return false;
945 948 }
946 949 // inject closing flag
947 950 $('.action-buttons-extra').append('<input type="hidden" class="close-pr-input" id="close_pull_request" value="1">');
948 951 $(generalCommentForm.statusChange).select2("val", status).trigger('change');
949 952 $(generalCommentForm.submitForm).submit();
950 953 };
951 954
952 955 //TODO this functionality is now missing
953 956 $('#show-outdated-comments').on('click', function (e) {
954 957 var button = $(this);
955 958 var outdated = $('.comment-outdated');
956 959
957 960 if (button.html() === "(Show)") {
958 961 button.html("(Hide)");
959 962 outdated.show();
960 963 } else {
961 964 button.html("(Show)");
962 965 outdated.hide();
963 966 }
964 967 });
965 968
966 969 $('#merge_pull_request_form').submit(function () {
967 970 if (!$('#merge_pull_request').attr('disabled')) {
968 971 $('#merge_pull_request').attr('disabled', 'disabled');
969 972 }
970 973 return true;
971 974 });
972 975
973 976 $('#edit_pull_request').on('click', function (e) {
974 977 var title = $('#pr-title-input').val();
975 978 var description = codeMirrorInstance.getValue();
976 979 var renderer = $('#pr-renderer-input').val();
977 980 editPullRequest(
978 981 "${c.repo_name}", "${c.pull_request.pull_request_id}",
979 982 title, description, renderer);
980 983 });
981 984
982 985 var $updateButtons = $('#update_reviewers,#update_observers');
983 986 $updateButtons.on('click', function (e) {
984 987 var role = $(this).data('role');
985 988 $updateButtons.attr('disabled', 'disabled');
986 989 $updateButtons.addClass('disabled');
987 990 $updateButtons.html(_gettext('Saving...'));
988 991 reviewersController.updateReviewers(
989 992 templateContext.repo_name,
990 993 templateContext.pull_request_data.pull_request_id,
991 994 role
992 995 );
993 996 });
994 997
995 998 // fixing issue with caches on firefox
996 999 $('#update_commits').removeAttr("disabled");
997 1000
998 1001 $('.show-inline-comments').on('click', function (e) {
999 1002 var boxid = $(this).attr('data-comment-id');
1000 1003 var button = $(this);
1001 1004
1002 1005 if (button.hasClass("comments-visible")) {
1003 1006 $('#{0} .inline-comments'.format(boxid)).each(function (index) {
1004 1007 $(this).hide();
1005 1008 });
1006 1009 button.removeClass("comments-visible");
1007 1010 } else {
1008 1011 $('#{0} .inline-comments'.format(boxid)).each(function (index) {
1009 1012 $(this).show();
1010 1013 });
1011 1014 button.addClass("comments-visible");
1012 1015 }
1013 1016 });
1014 1017
1015 1018 $('.show-inline-comments').on('change', function (e) {
1016 1019 var show = 'none';
1017 1020 var target = e.currentTarget;
1018 1021 if (target.checked) {
1019 1022 show = ''
1020 1023 }
1021 1024 var boxid = $(target).attr('id_for');
1022 1025 var comments = $('#{0} .inline-comments'.format(boxid));
1023 1026 var fn_display = function (idx) {
1024 1027 $(this).css('display', show);
1025 1028 };
1026 1029 $(comments).each(fn_display);
1027 1030 var btns = $('#{0} .inline-comments-button'.format(boxid));
1028 1031 $(btns).each(fn_display);
1029 1032 });
1030 1033
1031 1034 // register submit callback on commentForm form to track TODOs, and refresh mergeChecks conditions
1032 1035 window.commentFormGlobalSubmitSuccessCallback = function (comment) {
1033 1036 if (!comment.draft) {
1034 1037 refreshMergeChecks();
1035 1038 }
1036 1039 };
1037 1040
1038 1041 ReviewerAutoComplete('#user', reviewersController);
1039 1042 ObserverAutoComplete('#observer', reviewersController);
1040 1043
1041 1044 })();
1042 1045
1043 1046 $(document).ready(function () {
1044 1047
1045 1048 var channel = '${c.pr_broadcast_channel}';
1046 1049 new ReviewerPresenceController(channel)
1047 1050 // register globally so inject comment logic can re-use it.
1048 1051 window.commentsController = commentsController;
1049 1052 })
1050 1053 </script>
1051 1054
1052 1055 </%def>
@@ -1,125 +1,126 b''
1 1 <%inherit file="/summary/summary_base.mako"/>
2 2
3 3 <%namespace name="components" file="/summary/components.mako"/>
4 4
5 5
6 6 <%def name="menu_bar_subnav()">
7 7 ${self.repo_menu(active='summary')}
8 8 </%def>
9 9
10 10 <%def name="main()">
11 11
12 12 <div id="repo-summary" class="summary">
13 13 ${components.summary_detail(breadcrumbs_links=self.breadcrumbs_links(), show_downloads=True)}
14 14 </div><!--end repo-summary-->
15 15
16 16
17 17 <div class="box">
18 18 %if not c.repo_commits:
19 19 <div class="empty-repo">
20 20 <div class="title">
21 21 <h3>${_('Quick start')}</h3>
22 22 </div>
23 23 <div class="clear-fix"></div>
24 24 </div>
25 25 %endif
26 26 <div class="table">
27 27 <div id="shortlog_data">
28 28 <%include file='summary_commits.mako'/>
29 29 </div>
30 30 </div>
31 31 </div>
32 32
33 33 %if c.readme_data:
34 34 <div id="readme" class="anchor">
35 35 <div class="box">
36 36
37 37 <div class="readme-title" title="${h.tooltip(_('Readme file from commit %s:%s') % (c.rhodecode_db_repo.landing_ref_type, c.rhodecode_db_repo.landing_ref_name))}">
38 38 <div>
39 39 <i class="icon-file-text"></i>
40 40 <a href="${h.route_path('repo_files',repo_name=c.repo_name,commit_id=c.rhodecode_db_repo.landing_ref_name,f_path=c.readme_file)}">
41 41 ${c.readme_file}
42 42 </a>
43 43 </div>
44 44 </div>
45 45 <div class="readme codeblock">
46 46 <div class="readme_box">
47 47 ${c.readme_data|n}
48 48 </div>
49 49 </div>
50 50 </div>
51 51 </div>
52 52 %endif
53 53
54 54 <script type="text/javascript">
55 55 $(document).ready(function(){
56 56
57 57 var showCloneField = function(clone_url_format){
58 58 $.each(['http', 'http_id', 'ssh'], function (idx, val) {
59 59 if(val === clone_url_format){
60 60 $('#clone_option_' + val).show();
61 61 $('#clone_option').val(val)
62 62 } else {
63 63 $('#clone_option_' + val).hide();
64 64 }
65 65 });
66 66 };
67 67 // default taken from session
68 68 showCloneField(templateContext.session_attrs.clone_url_format);
69 69
70 70 $('#clone_option').on('change', function(e) {
71 71 var selected = $(this).val();
72 72
73 73 storeUserSessionAttr('rc_user_session_attr.clone_url_format', selected);
74 74 showCloneField(selected)
75 75 });
76 76
77 77 var initialCommitData = {
78 78 id: null,
79 79 text: '${c.rhodecode_db_repo.landing_ref_name}',
80 80 type: '${c.rhodecode_db_repo.landing_ref_type}',
81 81 raw_id: null,
82 82 files_url: null
83 83 };
84 84
85 85 select2RefSwitcher('#download_options', initialCommitData);
86 86
87 87 // on change of download options
88 88 $('#download_options').on('change', function(e) {
89 89 // format of Object {text: "v0.0.3", type: "tag", id: "rev"}
90 90 var selectedReference = e.added;
91 91 var ico = '<i class="icon-download"></i>';
92 92
93 93 $.each($('.archive_link'), function (key, val) {
94 94 var ext = $(this).data('ext');
95 95 var fname = selectedReference.raw_id + ext;
96 96 var href = pyroutes.url('repo_archivefile', {
97 97 'repo_name': templateContext.repo_name,
98 'fname': fname
98 'fname': fname,
99 'with_hash': '1'
99 100 });
100 101 // set new label
101 102 $(this).html(ico + ' {0}{1}'.format(escapeHtml(e.added.text), ext));
102 103 // set new url to button,
103 104 $(this).attr('href', href)
104 105 });
105 106
106 107 });
107 108
108 109
109 110 // calculate size of repository
110 111 calculateSize = function () {
111 112
112 113 var callback = function (data) {
113 114 % if c.show_stats:
114 115 showRepoStats('lang_stats', data);
115 116 % endif
116 117 };
117 118
118 119 showRepoSize('repo_size_container', templateContext.repo_name, templateContext.repo_landing_commit, callback);
119 120
120 121 }
121 122
122 123 })
123 124 </script>
124 125
125 126 </%def>
General Comments 1
Under Review
author

Auto status change to "Under Review"

You need to be logged in to leave comments. Login now