default-reviewers: introduce new voting rule logic that allows...
marcink
r2484:3775edd6 default

The requested changes are too big and the content was truncated.

@@ -0,0 +1,38 b''
1 import logging
2
3 from sqlalchemy import *
4
5 from rhodecode.model import meta
6 from rhodecode.lib.dbmigrate.versions import _reset_base, notify
7
8 log = logging.getLogger(__name__)
9
10
11 def upgrade(migrate_engine):
12 """
13 Upgrade operations go here.
14 Don't create your own engine; bind migrate_engine to your metadata
15 """
16 _reset_base(migrate_engine)
17 from rhodecode.lib.dbmigrate.schema import db_4_11_0_0 as db
18
19 reviewers_table = db.PullRequestReviewers.__table__
20
21 rule_data = Column(
22 'rule_data_json',
23 db.JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
24 rule_data.create(table=reviewers_table)
25
26 # issue fixups
27 fixups(db, meta.Session)
28
29
30 def downgrade(migrate_engine):
31 meta = MetaData()
32 meta.bind = migrate_engine
33
34
35 def fixups(models, _SESSION):
36 pass
37
38
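The hunk above is the first of two new migration steps: it attaches a rule_data_json JSON column to the pull-request reviewers table so each reviewer entry can record which default-reviewer rule produced it. A minimal sketch of reading that payload back, assuming the ORM exposes the column as rule_data_json and usually deserializes it to a dict (the key names used here are illustrative only, not defined by this commit):

import json

def describe_reviewer_rule(reviewer_row):
    # JsonType columns normally come back as a dict; fall back to json.loads
    # if a raw string is returned instead.
    raw = reviewer_row.rule_data_json or {}
    data = raw if isinstance(raw, dict) else json.loads(raw)
    # 'rule_name' and 'vote_rule' are hypothetical keys used for illustration.
    return '%s (vote rule: %s)' % (
        data.get('rule_name', 'manual'), data.get('vote_rule', -1))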
@@ -0,0 +1,37 b''
1 import logging
2
3 from sqlalchemy import *
4
5 from rhodecode.model import meta
6 from rhodecode.lib.dbmigrate.versions import _reset_base, notify
7
8 log = logging.getLogger(__name__)
9
10
11 def upgrade(migrate_engine):
12 """
13 Upgrade operations go here.
14 Don't create your own engine; bind migrate_engine to your metadata
15 """
16 _reset_base(migrate_engine)
17 from rhodecode.lib.dbmigrate.schema import db_4_11_0_0 as db
18
19 user_group_review_table = db.RepoReviewRuleUserGroup.__table__
20
21 vote_rule = Column("vote_rule", Integer(), nullable=True,
22 default=-1)
23 vote_rule.create(table=user_group_review_table)
24
25 # issue fixups
26 fixups(db, meta.Session)
27
28
29 def downgrade(migrate_engine):
30 meta = MetaData()
31 meta.bind = migrate_engine
32
33
34 def fixups(models, _SESSION):
35 pass
36
37
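The second migration adds an integer vote_rule column (default -1) to the user-group entries of repository review rules, which is where the voting-rule logic named in the commit title hooks in. The exact semantics are not visible in this truncated diff; one plausible, clearly assumed reading is that -1 means every group member must vote, while a positive value caps how many votes from the group are required:

def required_votes(user_group_rule, members):
    # Assumption for illustration only: -1 (the column default) = all members
    # must vote; N > 0 = only N votes from this user group are needed.
    vote_rule = user_group_rule.vote_rule
    if vote_rule is None or vote_rule == -1:
        return len(members)
    return min(vote_rule, len(members))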
1 NO CONTENT: new file 100644
1 NO CONTENT: new file 100644
1 NO CONTENT: new file 100644
1 NO CONTENT: new file 100644
@@ -1,194 +1,196 b''
1 1 {
2 2 "dirs": {
3 3 "css": {
4 4 "src":"rhodecode/public/css",
5 5 "dest":"rhodecode/public/css"
6 6 },
7 7 "js": {
8 8 "src": "rhodecode/public/js/src",
9 9 "src_rc": "rhodecode/public/js/rhodecode",
10 10 "dest": "rhodecode/public/js",
11 11 "bower": "bower_components",
12 12 "node_modules": "node_modules"
13 13 }
14 14 },
15 15 "copy": {
16 16 "main": {
17 17 "expand": true,
18 18 "cwd": "bower_components",
19 19 "src": "webcomponentsjs/webcomponents-lite.js",
20 20 "dest": "<%= dirs.js.dest %>/vendors"
21 21 }
22 22 },
23 23 "concat": {
24 24 "polymercss": {
25 25 "src": [
26 26 "<%= dirs.js.src %>/components/root-styles-prefix.html",
27 27 "<%= dirs.css.src %>/style-polymer.css",
28 28 "<%= dirs.js.src %>/components/root-styles-suffix.html"
29 29 ],
30 30 "dest": "<%= dirs.js.dest %>/src/components/root-styles.gen.html",
31 31 "nonull": true
32 32 },
33 33 "dist": {
34 34 "src": [
35 35 "<%= dirs.js.node_modules %>/jquery/dist/jquery.min.js",
36 36 "<%= dirs.js.node_modules %>/mousetrap/mousetrap.min.js",
37 37 "<%= dirs.js.node_modules %>/moment/min/moment.min.js",
38 38 "<%= dirs.js.node_modules %>/clipboard/dist/clipboard.min.js",
39 39 "<%= dirs.js.node_modules %>/favico.js/favico-0.3.10.min.js",
40 40 "<%= dirs.js.node_modules %>/appenlight-client/appenlight-client.min.js",
41 41 "<%= dirs.js.src %>/logging.js",
42 42 "<%= dirs.js.src %>/bootstrap.js",
43 43 "<%= dirs.js.src %>/i18n_utils.js",
44 44 "<%= dirs.js.src %>/deform.js",
45 "<%= dirs.js.src %>/ejs.js",
46 "<%= dirs.js.src %>/ejs_templates/utils.js",
45 47 "<%= dirs.js.src %>/plugins/jquery.pjax.js",
46 48 "<%= dirs.js.src %>/plugins/jquery.dataTables.js",
47 49 "<%= dirs.js.src %>/plugins/flavoured_checkbox.js",
48 50 "<%= dirs.js.src %>/plugins/jquery.auto-grow-input.js",
49 51 "<%= dirs.js.src %>/plugins/jquery.autocomplete.js",
50 52 "<%= dirs.js.src %>/plugins/jquery.debounce.js",
51 53 "<%= dirs.js.src %>/plugins/jquery.mark.js",
52 54 "<%= dirs.js.src %>/plugins/jquery.timeago.js",
53 55 "<%= dirs.js.src %>/plugins/jquery.timeago-extension.js",
54 56 "<%= dirs.js.src %>/select2/select2.js",
55 57 "<%= dirs.js.src %>/codemirror/codemirror.js",
56 58 "<%= dirs.js.src %>/codemirror/codemirror_loadmode.js",
57 59 "<%= dirs.js.src %>/codemirror/codemirror_hint.js",
58 60 "<%= dirs.js.src %>/codemirror/codemirror_overlay.js",
59 61 "<%= dirs.js.src %>/codemirror/codemirror_placeholder.js",
60 62 "<%= dirs.js.src %>/codemirror/codemirror_simplemode.js",
61 63 "<%= dirs.js.dest %>/mode/meta.js",
62 64 "<%= dirs.js.dest %>/mode/meta_ext.js",
63 65 "<%= dirs.js.src_rc %>/i18n/select2/translations.js",
64 66 "<%= dirs.js.src %>/rhodecode/utils/array.js",
65 67 "<%= dirs.js.src %>/rhodecode/utils/string.js",
66 68 "<%= dirs.js.src %>/rhodecode/utils/pyroutes.js",
67 69 "<%= dirs.js.src %>/rhodecode/utils/ajax.js",
68 70 "<%= dirs.js.src %>/rhodecode/utils/autocomplete.js",
69 71 "<%= dirs.js.src %>/rhodecode/utils/colorgenerator.js",
70 72 "<%= dirs.js.src %>/rhodecode/utils/ie.js",
71 73 "<%= dirs.js.src %>/rhodecode/utils/os.js",
72 74 "<%= dirs.js.src %>/rhodecode/utils/topics.js",
73 75 "<%= dirs.js.src %>/rhodecode/init.js",
74 76 "<%= dirs.js.src %>/rhodecode/changelog.js",
75 77 "<%= dirs.js.src %>/rhodecode/codemirror.js",
76 78 "<%= dirs.js.src %>/rhodecode/comments.js",
77 79 "<%= dirs.js.src %>/rhodecode/constants.js",
78 80 "<%= dirs.js.src %>/rhodecode/files.js",
79 81 "<%= dirs.js.src %>/rhodecode/followers.js",
80 82 "<%= dirs.js.src %>/rhodecode/menus.js",
81 83 "<%= dirs.js.src %>/rhodecode/notifications.js",
82 84 "<%= dirs.js.src %>/rhodecode/permissions.js",
83 85 "<%= dirs.js.src %>/rhodecode/pjax.js",
84 86 "<%= dirs.js.src %>/rhodecode/pullrequests.js",
85 87 "<%= dirs.js.src %>/rhodecode/settings.js",
86 88 "<%= dirs.js.src %>/rhodecode/select2_widgets.js",
87 89 "<%= dirs.js.src %>/rhodecode/tooltips.js",
88 90 "<%= dirs.js.src %>/rhodecode/users.js",
89 91 "<%= dirs.js.src %>/rhodecode/appenlight.js",
90 92 "<%= dirs.js.src %>/rhodecode.js"
91 93 ],
92 94 "dest": "<%= dirs.js.dest %>/scripts.js",
93 95 "nonull": true
94 96 }
95 97 },
96 98 "crisper": {
97 99 "dist": {
98 100 "options": {
99 101 "cleanup": false,
100 102 "onlySplit": true
101 103 },
102 104 "src": "<%= dirs.js.dest %>/rhodecode-components.html",
103 105 "dest": "<%= dirs.js.dest %>/rhodecode-components.js"
104 106 }
105 107 },
106 108 "less": {
107 109 "development": {
108 110 "options": {
109 111 "compress": false,
110 112 "yuicompress": false,
111 113 "optimization": 0
112 114 },
113 115 "files": {
114 116 "<%= dirs.css.dest %>/style.css": "<%= dirs.css.src %>/main.less",
115 117 "<%= dirs.css.dest %>/style-polymer.css": "<%= dirs.css.src %>/polymer.less"
116 118 }
117 119 },
118 120 "production": {
119 121 "options": {
120 122 "compress": true,
121 123 "yuicompress": true,
122 124 "optimization": 2
123 125 },
124 126 "files": {
125 127 "<%= dirs.css.dest %>/style.css": "<%= dirs.css.src %>/main.less",
126 128 "<%= dirs.css.dest %>/style-polymer.css": "<%= dirs.css.src %>/polymer.less"
127 129 }
128 130 },
129 131 "components": {
130 132 "files": [
131 133 {
132 134 "cwd": "<%= dirs.js.src %>/components/",
133 135 "dest": "<%= dirs.js.src %>/components/",
134 136 "src": [
135 137 "**/*.less"
136 138 ],
137 139 "expand": true,
138 140 "ext": ".css"
139 141 }
140 142 ]
141 143 }
142 144 },
143 145 "watch": {
144 146 "less": {
145 147 "files": [
146 148 "<%= dirs.css.src %>/**/*.less",
147 149 "<%= dirs.js.src %>/components/**/*.less"
148 150 ],
149 151 "tasks": [
150 152 "less:development",
151 153 "less:components",
152 154 "concat:polymercss",
153 155 "vulcanize",
154 156 "crisper",
155 157 "concat:dist"
156 158 ]
157 159 },
158 160 "js": {
159 161 "files": [
160 162 "!<%= dirs.js.src %>/components/root-styles.gen.html",
161 163 "<%= dirs.js.src %>/**/*.js",
162 164 "<%= dirs.js.src %>/components/**/*.html"
163 165 ],
164 166 "tasks": [
165 167 "less:components",
166 168 "concat:polymercss",
167 169 "vulcanize",
168 170 "crisper",
169 171 "concat:dist"
170 172 ]
171 173 }
172 174 },
173 175 "jshint": {
174 176 "rhodecode": {
175 177 "src": "<%= dirs.js.src %>/rhodecode/**/*.js",
176 178 "options": {
177 179 "jshintrc": ".jshintrc"
178 180 }
179 181 }
180 182 },
181 183 "vulcanize": {
182 184 "default": {
183 185 "options": {
184 186 "abspath": "",
185 187 "inlineScripts": true,
186 188 "inlineCss": true,
187 189 "stripComments": true
188 190 },
189 191 "files": {
190 192 "<%= dirs.js.dest %>/rhodecode-components.html": "<%= dirs.js.src %>/components/shared-components.html"
191 193 }
192 194 }
193 195 }
194 196 }
@@ -1,63 +1,63 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22
23 23 RhodeCode, a web based repository management software
24 24 versioning implementation: http://www.python.org/dev/peps/pep-0386/
25 25 """
26 26
27 27 import os
28 28 import sys
29 29 import platform
30 30
31 31 VERSION = tuple(open(os.path.join(
32 32 os.path.dirname(__file__), 'VERSION')).read().split('.'))
33 33
34 34 BACKENDS = {
35 35 'hg': 'Mercurial repository',
36 36 'git': 'Git repository',
37 37 'svn': 'Subversion repository',
38 38 }
39 39
40 40 CELERY_ENABLED = False
41 41 CELERY_EAGER = False
42 42
43 43 # link to config for pyramid
44 44 CONFIG = {}
45 45
46 46 # Populated with the settings dictionary from application init in
47 47 # rhodecode.conf.environment.load_pyramid_environment
48 48 PYRAMID_SETTINGS = {}
49 49
50 50 # Linked module for extensions
51 51 EXTENSIONS = {}
52 52
53 53 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
54 __dbversion__ = 83 # defines current db version for migrations
54 __dbversion__ = 85 # defines current db version for migrations
55 55 __platform__ = platform.system()
56 56 __license__ = 'AGPLv3, and Commercial License'
57 57 __author__ = 'RhodeCode GmbH'
58 58 __url__ = 'https://code.rhodecode.com'
59 59
60 60 is_windows = __platform__ in ['Windows']
61 61 is_unix = not is_windows
62 62 is_test = False
63 63 disable_error_handler = False
@@ -1,142 +1,142 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23 import urlobject
24 24
25 25 from rhodecode.api.tests.utils import (
26 26 build_data, api_call, assert_error, assert_ok)
27 27 from rhodecode.lib import helpers as h
28 28 from rhodecode.lib.utils2 import safe_unicode
29 29
30 30 pytestmark = pytest.mark.backends("git", "hg")
31 31
32 32
33 33 @pytest.mark.usefixtures("testuser_api", "app")
34 34 class TestGetPullRequest(object):
35 35
36 36 def test_api_get_pull_request(self, pr_util, http_host_only_stub):
37 37 from rhodecode.model.pull_request import PullRequestModel
38 38 pull_request = pr_util.create_pull_request(mergeable=True)
39 39 id_, params = build_data(
40 40 self.apikey, 'get_pull_request',
41 41 pullrequestid=pull_request.pull_request_id)
42 42
43 43 response = api_call(self.app, params)
44 44
45 45 assert response.status == '200 OK'
46 46
47 47 url_obj = urlobject.URLObject(
48 48 h.route_url(
49 49 'pullrequest_show',
50 50 repo_name=pull_request.target_repo.repo_name,
51 51 pull_request_id=pull_request.pull_request_id))
52 52
53 53 pr_url = safe_unicode(
54 54 url_obj.with_netloc(http_host_only_stub))
55 55 source_url = safe_unicode(
56 56 pull_request.source_repo.clone_url().with_netloc(http_host_only_stub))
57 57 target_url = safe_unicode(
58 58 pull_request.target_repo.clone_url().with_netloc(http_host_only_stub))
59 59 shadow_url = safe_unicode(
60 60 PullRequestModel().get_shadow_clone_url(pull_request))
61 61
62 62 expected = {
63 63 'pull_request_id': pull_request.pull_request_id,
64 64 'url': pr_url,
65 65 'title': pull_request.title,
66 66 'description': pull_request.description,
67 67 'status': pull_request.status,
68 68 'created_on': pull_request.created_on,
69 69 'updated_on': pull_request.updated_on,
70 70 'commit_ids': pull_request.revisions,
71 71 'review_status': pull_request.calculated_review_status(),
72 72 'mergeable': {
73 73 'status': True,
74 74 'message': 'This pull request can be automatically merged.',
75 75 },
76 76 'source': {
77 77 'clone_url': source_url,
78 78 'repository': pull_request.source_repo.repo_name,
79 79 'reference': {
80 80 'name': pull_request.source_ref_parts.name,
81 81 'type': pull_request.source_ref_parts.type,
82 82 'commit_id': pull_request.source_ref_parts.commit_id,
83 83 },
84 84 },
85 85 'target': {
86 86 'clone_url': target_url,
87 87 'repository': pull_request.target_repo.repo_name,
88 88 'reference': {
89 89 'name': pull_request.target_ref_parts.name,
90 90 'type': pull_request.target_ref_parts.type,
91 91 'commit_id': pull_request.target_ref_parts.commit_id,
92 92 },
93 93 },
94 94 'merge': {
95 95 'clone_url': shadow_url,
96 96 'reference': {
97 97 'name': pull_request.shadow_merge_ref.name,
98 98 'type': pull_request.shadow_merge_ref.type,
99 99 'commit_id': pull_request.shadow_merge_ref.commit_id,
100 100 },
101 101 },
102 102 'author': pull_request.author.get_api_data(include_secrets=False,
103 103 details='basic'),
104 104 'reviewers': [
105 105 {
106 106 'user': reviewer.get_api_data(include_secrets=False,
107 107 details='basic'),
108 108 'reasons': reasons,
109 109 'review_status': st[0][1].status if st else 'not_reviewed',
110 110 }
111 for reviewer, reasons, mandatory, st in
111 for obj, reviewer, reasons, mandatory, st in
112 112 pull_request.reviewers_statuses()
113 113 ]
114 114 }
115 115 assert_ok(id_, expected, response.body)
116 116
117 117 def test_api_get_pull_request_repo_error(self, pr_util):
118 118 pull_request = pr_util.create_pull_request()
119 119 id_, params = build_data(
120 120 self.apikey, 'get_pull_request',
121 121 repoid=666, pullrequestid=pull_request.pull_request_id)
122 122 response = api_call(self.app, params)
123 123
124 124 expected = 'repository `666` does not exist'
125 125 assert_error(id_, expected, given=response.body)
126 126
127 127 def test_api_get_pull_request_pull_request_error(self):
128 128 id_, params = build_data(
129 129 self.apikey, 'get_pull_request', pullrequestid=666)
130 130 response = api_call(self.app, params)
131 131
132 132 expected = 'pull request `666` does not exist'
133 133 assert_error(id_, expected, given=response.body)
134 134
135 135 def test_api_get_pull_request_pull_request_error_just_pr_id(self):
136 136 id_, params = build_data(
137 137 self.apikey, 'get_pull_request',
138 138 pullrequestid=666)
139 139 response = api_call(self.app, params)
140 140
141 141 expected = 'pull request `666` does not exist'
142 142 assert_error(id_, expected, given=response.body)
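The updated test above shows that reviewers_statuses() now yields five-element tuples, with the reviewer row object prepended to the previous (reviewer, reasons, mandatory, st) shape. Callers that iterate over it have to unpack the extra element; a short sketch, with the summary dict shape chosen only for illustration:

def summarize_reviewers(pull_request):
    summary = []
    # obj is the PullRequestReviewers row, reviewer the User, st the status history.
    for obj, reviewer, reasons, mandatory, st in pull_request.reviewers_statuses():
        summary.append({
            'user': reviewer.username,
            'reasons': reasons,
            'mandatory': mandatory,
            'status': st[0][1].status if st else 'not_reviewed',
        })
    return summary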
@@ -1,213 +1,213 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.vcs.nodes import FileNode
24 24 from rhodecode.model.db import User
25 25 from rhodecode.model.pull_request import PullRequestModel
26 26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
27 27 from rhodecode.api.tests.utils import (
28 28 build_data, api_call, assert_ok, assert_error)
29 29
30 30
31 31 @pytest.mark.usefixtures("testuser_api", "app")
32 32 class TestUpdatePullRequest(object):
33 33
34 34 @pytest.mark.backends("git", "hg")
35 35 def test_api_update_pull_request_title_or_description(
36 36 self, pr_util, no_notifications):
37 37 pull_request = pr_util.create_pull_request()
38 38
39 39 id_, params = build_data(
40 40 self.apikey, 'update_pull_request',
41 41 repoid=pull_request.target_repo.repo_name,
42 42 pullrequestid=pull_request.pull_request_id,
43 43 title='New TITLE OF A PR',
44 44 description='New DESC OF A PR',
45 45 )
46 46 response = api_call(self.app, params)
47 47
48 48 expected = {
49 49 "msg": "Updated pull request `{}`".format(
50 50 pull_request.pull_request_id),
51 51 "pull_request": response.json['result']['pull_request'],
52 52 "updated_commits": {"added": [], "common": [], "removed": []},
53 53 "updated_reviewers": {"added": [], "removed": []},
54 54 }
55 55
56 56 response_json = response.json['result']
57 57 assert response_json == expected
58 58 pr = response_json['pull_request']
59 59 assert pr['title'] == 'New TITLE OF A PR'
60 60 assert pr['description'] == 'New DESC OF A PR'
61 61
62 62 @pytest.mark.backends("git", "hg")
63 63 def test_api_try_update_closed_pull_request(
64 64 self, pr_util, no_notifications):
65 65 pull_request = pr_util.create_pull_request()
66 66 PullRequestModel().close_pull_request(
67 67 pull_request, TEST_USER_ADMIN_LOGIN)
68 68
69 69 id_, params = build_data(
70 70 self.apikey, 'update_pull_request',
71 71 repoid=pull_request.target_repo.repo_name,
72 72 pullrequestid=pull_request.pull_request_id)
73 73 response = api_call(self.app, params)
74 74
75 75 expected = 'pull request `{}` update failed, pull request ' \
76 76 'is closed'.format(pull_request.pull_request_id)
77 77
78 78 assert_error(id_, expected, response.body)
79 79
80 80 @pytest.mark.backends("git", "hg")
81 81 def test_api_update_update_commits(self, pr_util, no_notifications):
82 82 commits = [
83 83 {'message': 'a'},
84 84 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
85 85 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
86 86 ]
87 87 pull_request = pr_util.create_pull_request(
88 88 commits=commits, target_head='a', source_head='b', revisions=['b'])
89 89 pr_util.update_source_repository(head='c')
90 90 repo = pull_request.source_repo.scm_instance()
91 91 commits = [x for x in repo.get_commits()]
92 92 print commits
93 93
94 94 added_commit_id = commits[-1].raw_id # c commit
95 95 common_commit_id = commits[1].raw_id # b commit is common ancestor
96 96 total_commits = [added_commit_id, common_commit_id]
97 97
98 98 id_, params = build_data(
99 99 self.apikey, 'update_pull_request',
100 100 repoid=pull_request.target_repo.repo_name,
101 101 pullrequestid=pull_request.pull_request_id,
102 102 update_commits=True
103 103 )
104 104 response = api_call(self.app, params)
105 105
106 106 expected = {
107 107 "msg": "Updated pull request `{}`".format(
108 108 pull_request.pull_request_id),
109 109 "pull_request": response.json['result']['pull_request'],
110 110 "updated_commits": {"added": [added_commit_id],
111 111 "common": [common_commit_id],
112 112 "total": total_commits,
113 113 "removed": []},
114 114 "updated_reviewers": {"added": [], "removed": []},
115 115 }
116 116
117 117 assert_ok(id_, expected, response.body)
118 118
119 119 @pytest.mark.backends("git", "hg")
120 120 def test_api_update_change_reviewers(
121 121 self, user_util, pr_util, no_notifications):
122 122 a = user_util.create_user()
123 123 b = user_util.create_user()
124 124 c = user_util.create_user()
125 125 new_reviewers = [
126 126 {'username': b.username,'reasons': ['updated via API'],
127 127 'mandatory':False},
128 128 {'username': c.username, 'reasons': ['updated via API'],
129 129 'mandatory':False},
130 130 ]
131 131
132 132 added = [b.username, c.username]
133 133 removed = [a.username]
134 134
135 135 pull_request = pr_util.create_pull_request(
136 reviewers=[(a.username, ['added via API'], False)])
136 reviewers=[(a.username, ['added via API'], False, [])])
137 137
138 138 id_, params = build_data(
139 139 self.apikey, 'update_pull_request',
140 140 repoid=pull_request.target_repo.repo_name,
141 141 pullrequestid=pull_request.pull_request_id,
142 142 reviewers=new_reviewers)
143 143 response = api_call(self.app, params)
144 144 expected = {
145 145 "msg": "Updated pull request `{}`".format(
146 146 pull_request.pull_request_id),
147 147 "pull_request": response.json['result']['pull_request'],
148 148 "updated_commits": {"added": [], "common": [], "removed": []},
149 149 "updated_reviewers": {"added": added, "removed": removed},
150 150 }
151 151
152 152 assert_ok(id_, expected, response.body)
153 153
154 154 @pytest.mark.backends("git", "hg")
155 155 def test_api_update_bad_user_in_reviewers(self, pr_util):
156 156 pull_request = pr_util.create_pull_request()
157 157
158 158 id_, params = build_data(
159 159 self.apikey, 'update_pull_request',
160 160 repoid=pull_request.target_repo.repo_name,
161 161 pullrequestid=pull_request.pull_request_id,
162 162 reviewers=[{'username': 'bad_name'}])
163 163 response = api_call(self.app, params)
164 164
165 165 expected = 'user `bad_name` does not exist'
166 166
167 167 assert_error(id_, expected, response.body)
168 168
169 169 @pytest.mark.backends("git", "hg")
170 170 def test_api_update_repo_error(self, pr_util):
171 171 pull_request = pr_util.create_pull_request()
172 172 id_, params = build_data(
173 173 self.apikey, 'update_pull_request',
174 174 repoid='fake',
175 175 pullrequestid=pull_request.pull_request_id,
176 176 reviewers=[{'username': 'bad_name'}])
177 177 response = api_call(self.app, params)
178 178
179 179 expected = 'repository `fake` does not exist'
180 180
181 181 response_json = response.json['error']
182 182 assert response_json == expected
183 183
184 184 @pytest.mark.backends("git", "hg")
185 185 def test_api_update_pull_request_error(self, pr_util):
186 186 pull_request = pr_util.create_pull_request()
187 187
188 188 id_, params = build_data(
189 189 self.apikey, 'update_pull_request',
190 190 repoid=pull_request.target_repo.repo_name,
191 191 pullrequestid=999999,
192 192 reviewers=[{'username': 'bad_name'}])
193 193 response = api_call(self.app, params)
194 194
195 195 expected = 'pull request `999999` does not exist'
196 196 assert_error(id_, expected, response.body)
197 197
198 198 @pytest.mark.backends("git", "hg")
199 199 def test_api_update_pull_request_no_perms_to_update(
200 200 self, user_util, pr_util):
201 201 user = user_util.create_user()
202 202 pull_request = pr_util.create_pull_request()
203 203
204 204 id_, params = build_data(
205 205 user.api_key, 'update_pull_request',
206 206 repoid=pull_request.target_repo.repo_name,
207 207 pullrequestid=pull_request.pull_request_id,)
208 208 response = api_call(self.app, params)
209 209
210 210 expected = ('pull request `%s` update failed, '
211 211 'no permission to update.') % pull_request.pull_request_id
212 212
213 213 assert_error(id_, expected, response.body)
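Throughout these tests the reviewer tuples gain a fourth element, going from (user, reasons, mandatory) to (user, reasons, mandatory, rules), where an empty list means no rule is attached. A hedged sketch of building such a list for PullRequestModel().update_reviewers() follows; the user names and reason strings are placeholders, not values from this commit:

from rhodecode.model.pull_request import PullRequestModel

def set_reviewers(pull_request, author):
    # Each entry: (user id or username, reasons, mandatory flag, rules list).
    new_reviewers = [
        ('admin', ['code owner'], True, []),           # mandatory reviewer, no rule data
        ('reviewer_b', ['added via API'], False, []),  # optional reviewer
    ]
    PullRequestModel().update_reviewers(
        pull_request.pull_request_id, new_reviewers, author)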
@@ -1,248 +1,247 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 import formencode
24 24 import formencode.htmlfill
25 25
26 26 from pyramid.httpexceptions import HTTPFound
27 27 from pyramid.view import view_config
28 28 from pyramid.response import Response
29 29 from pyramid.renderers import render
30 30
31 31 from rhodecode.apps._base import BaseAppView, DataGridAppView
32 32 from rhodecode.lib.auth import (
33 33 LoginRequired, NotAnonymous, CSRFRequired, HasPermissionAnyDecorator)
34 34 from rhodecode.lib import helpers as h, audit_logger
35 35 from rhodecode.lib.utils2 import safe_unicode
36 36
37 37 from rhodecode.model.forms import UserGroupForm
38 38 from rhodecode.model.permission import PermissionModel
39 39 from rhodecode.model.scm import UserGroupList
40 40 from rhodecode.model.db import (
41 41 or_, count, User, UserGroup, UserGroupMember)
42 42 from rhodecode.model.meta import Session
43 43 from rhodecode.model.user_group import UserGroupModel
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
48 48 class AdminUserGroupsView(BaseAppView, DataGridAppView):
49 49
50 50 def load_default_context(self):
51 51 c = self._get_local_tmpl_context()
52 52
53 53 PermissionModel().set_global_permission_choices(
54 54 c, gettext_translator=self.request.translate)
55 55
56
57 56 return c
58 57
59 58 # permission check in data loading of
60 59 # `user_groups_list_data` via UserGroupList
61 60 @LoginRequired()
62 61 @NotAnonymous()
63 62 @view_config(
64 63 route_name='user_groups', request_method='GET',
65 64 renderer='rhodecode:templates/admin/user_groups/user_groups.mako')
66 65 def user_groups_list(self):
67 66 c = self.load_default_context()
68 67 return self._get_template_context(c)
69 68
70 69 # permission check inside
71 70 @LoginRequired()
72 71 @NotAnonymous()
73 72 @view_config(
74 73 route_name='user_groups_data', request_method='GET',
75 74 renderer='json_ext', xhr=True)
76 75 def user_groups_list_data(self):
77 76 self.load_default_context()
78 77 column_map = {
79 78 'active': 'users_group_active',
80 79 'description': 'user_group_description',
81 80 'members': 'members_total',
82 81 'owner': 'user_username',
83 82 'sync': 'group_data'
84 83 }
85 84 draw, start, limit = self._extract_chunk(self.request)
86 85 search_q, order_by, order_dir = self._extract_ordering(
87 86 self.request, column_map=column_map)
88 87
89 88 _render = self.request.get_partial_renderer(
90 89 'rhodecode:templates/data_table/_dt_elements.mako')
91 90
92 91 def user_group_name(user_group_id, user_group_name):
93 92 return _render("user_group_name", user_group_id, user_group_name)
94 93
95 94 def user_group_actions(user_group_id, user_group_name):
96 95 return _render("user_group_actions", user_group_id, user_group_name)
97 96
98 97 def user_profile(username):
99 98 return _render('user_profile', username)
100 99
101 100 auth_user_group_list = UserGroupList(
102 101 UserGroup.query().all(), perm_set=['usergroup.admin'])
103 102
104 103 allowed_ids = [-1]
105 104 for user_group in auth_user_group_list:
106 105 allowed_ids.append(user_group.users_group_id)
107 106
108 107 user_groups_data_total_count = UserGroup.query()\
109 108 .filter(UserGroup.users_group_id.in_(allowed_ids))\
110 109 .count()
111 110
112 111 member_count = count(UserGroupMember.user_id)
113 112 base_q = Session.query(
114 113 UserGroup.users_group_name,
115 114 UserGroup.user_group_description,
116 115 UserGroup.users_group_active,
117 116 UserGroup.users_group_id,
118 117 UserGroup.group_data,
119 118 User,
120 119 member_count.label('member_count')
121 120 ) \
122 121 .filter(UserGroup.users_group_id.in_(allowed_ids)) \
123 122 .outerjoin(UserGroupMember) \
124 123 .join(User, User.user_id == UserGroup.user_id) \
125 124 .group_by(UserGroup, User)
126 125
127 126 if search_q:
128 127 like_expression = u'%{}%'.format(safe_unicode(search_q))
129 128 base_q = base_q.filter(or_(
130 129 UserGroup.users_group_name.ilike(like_expression),
131 130 ))
132 131
133 132 user_groups_data_total_filtered_count = base_q.count()
134 133
135 134 if order_by == 'members_total':
136 135 sort_col = member_count
137 136 elif order_by == 'user_username':
138 137 sort_col = User.username
139 138 else:
140 139 sort_col = getattr(UserGroup, order_by, None)
141 140
142 141 if isinstance(sort_col, count) or sort_col:
143 142 if order_dir == 'asc':
144 143 sort_col = sort_col.asc()
145 144 else:
146 145 sort_col = sort_col.desc()
147 146
148 147 base_q = base_q.order_by(sort_col)
149 148 base_q = base_q.offset(start).limit(limit)
150 149
151 150 # authenticated access to user groups
152 151 auth_user_group_list = base_q.all()
153 152
154 153 user_groups_data = []
155 154 for user_gr in auth_user_group_list:
156 155 user_groups_data.append({
157 156 "users_group_name": user_group_name(
158 157 user_gr.users_group_id, h.escape(user_gr.users_group_name)),
159 158 "name_raw": h.escape(user_gr.users_group_name),
160 159 "description": h.escape(user_gr.user_group_description),
161 160 "members": user_gr.member_count,
162 161 # NOTE(marcink): because of advanced query we
163 162 # need to load it like that
164 163 "sync": UserGroup._load_group_data(
165 164 user_gr.group_data).get('extern_type'),
166 165 "active": h.bool2icon(user_gr.users_group_active),
167 166 "owner": user_profile(user_gr.User.username),
168 167 "action": user_group_actions(
169 168 user_gr.users_group_id, user_gr.users_group_name)
170 169 })
171 170
172 171 data = ({
173 172 'draw': draw,
174 173 'data': user_groups_data,
175 174 'recordsTotal': user_groups_data_total_count,
176 175 'recordsFiltered': user_groups_data_total_filtered_count,
177 176 })
178 177
179 178 return data
180 179
181 180 @LoginRequired()
182 181 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
183 182 @view_config(
184 183 route_name='user_groups_new', request_method='GET',
185 184 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
186 185 def user_groups_new(self):
187 186 c = self.load_default_context()
188 187 return self._get_template_context(c)
189 188
190 189 @LoginRequired()
191 190 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
192 191 @CSRFRequired()
193 192 @view_config(
194 193 route_name='user_groups_create', request_method='POST',
195 194 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
196 195 def user_groups_create(self):
197 196 _ = self.request.translate
198 197 c = self.load_default_context()
199 198 users_group_form = UserGroupForm(self.request.translate)()
200 199
201 200 user_group_name = self.request.POST.get('users_group_name')
202 201 try:
203 202 form_result = users_group_form.to_python(dict(self.request.POST))
204 203 user_group = UserGroupModel().create(
205 204 name=form_result['users_group_name'],
206 205 description=form_result['user_group_description'],
207 206 owner=self._rhodecode_user.user_id,
208 207 active=form_result['users_group_active'])
209 208 Session().flush()
210 209 creation_data = user_group.get_api_data()
211 210 user_group_name = form_result['users_group_name']
212 211
213 212 audit_logger.store_web(
214 213 'user_group.create', action_data={'data': creation_data},
215 214 user=self._rhodecode_user)
216 215
217 216 user_group_link = h.link_to(
218 217 h.escape(user_group_name),
219 218 h.route_path(
220 219 'edit_user_group', user_group_id=user_group.users_group_id))
221 220 h.flash(h.literal(_('Created user group %(user_group_link)s')
222 221 % {'user_group_link': user_group_link}),
223 222 category='success')
224 223 Session().commit()
225 224 user_group_id = user_group.users_group_id
226 225 except formencode.Invalid as errors:
227 226
228 227 data = render(
229 228 'rhodecode:templates/admin/user_groups/user_group_add.mako',
230 229 self._get_template_context(c), self.request)
231 230 html = formencode.htmlfill.render(
232 231 data,
233 232 defaults=errors.value,
234 233 errors=errors.error_dict or {},
235 234 prefix_error=False,
236 235 encoding="UTF-8",
237 236 force_defaults=False
238 237 )
239 238 return Response(html)
240 239
241 240 except Exception:
242 241 log.exception("Exception creating user group")
243 242 h.flash(_('Error occurred during creation of user group %s') \
244 243 % user_group_name, category='error')
245 244 raise HTTPFound(h.route_path('user_groups_new'))
246 245
247 246 raise HTTPFound(
248 247 h.route_path('edit_user_group', user_group_id=user_group_id))
@@ -1,1134 +1,1140 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35 from rhodecode.tests.utils import AssertResponse
36 36
37 37
38 38 def route_path(name, params=None, **kwargs):
39 39 import urllib
40 40
41 41 base_url = {
42 42 'repo_changelog': '/{repo_name}/changelog',
43 43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 56 }[name].format(**kwargs)
57 57
58 58 if params:
59 59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 60 return base_url
61 61
62 62
63 63 @pytest.mark.usefixtures('app', 'autologin_user')
64 64 @pytest.mark.backends("git", "hg")
65 65 class TestPullrequestsView(object):
66 66
67 67 def test_index(self, backend):
68 68 self.app.get(route_path(
69 69 'pullrequest_new',
70 70 repo_name=backend.repo_name))
71 71
72 72 def test_option_menu_create_pull_request_exists(self, backend):
73 73 repo_name = backend.repo_name
74 74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75 75
76 76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 77 'pullrequest_new', repo_name=repo_name)
78 78 response.mustcontain(create_pr_link)
79 79
80 80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 81 repo = backend.repo
82 82
83 83 self.app.get(
84 84 route_path('pullrequest_new',
85 85 repo_name=repo.repo_name,
86 86 commit=repo.get_commit().raw_id),
87 87 status=200)
88 88
89 89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 90 def test_show(self, pr_util, pr_merge_enabled):
91 91 pull_request = pr_util.create_pull_request(
92 92 mergeable=pr_merge_enabled, enable_notifications=False)
93 93
94 94 response = self.app.get(route_path(
95 95 'pullrequest_show',
96 96 repo_name=pull_request.target_repo.scm_instance().name,
97 97 pull_request_id=pull_request.pull_request_id))
98 98
99 99 for commit_id in pull_request.revisions:
100 100 response.mustcontain(commit_id)
101 101
102 102 assert pull_request.target_ref_parts.type in response
103 103 assert pull_request.target_ref_parts.name in response
104 104 target_clone_url = pull_request.target_repo.clone_url()
105 105 assert target_clone_url in response
106 106
107 107 assert 'class="pull-request-merge"' in response
108 108 assert (
109 109 'Server-side pull request merging is disabled.'
110 110 in response) != pr_merge_enabled
111 111
112 112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
113 113 # Logout
114 114 response = self.app.post(
115 115 h.route_path('logout'),
116 116 params={'csrf_token': csrf_token})
117 117 # Login as regular user
118 118 response = self.app.post(h.route_path('login'),
119 119 {'username': TEST_USER_REGULAR_LOGIN,
120 120 'password': 'test12'})
121 121
122 122 pull_request = pr_util.create_pull_request(
123 123 author=TEST_USER_REGULAR_LOGIN)
124 124
125 125 response = self.app.get(route_path(
126 126 'pullrequest_show',
127 127 repo_name=pull_request.target_repo.scm_instance().name,
128 128 pull_request_id=pull_request.pull_request_id))
129 129
130 130 response.mustcontain('Server-side pull request merging is disabled.')
131 131
132 132 assert_response = response.assert_response()
133 133 # for regular user without a merge permissions, we don't see it
134 134 assert_response.no_element_exists('#close-pull-request-action')
135 135
136 136 user_util.grant_user_permission_to_repo(
137 137 pull_request.target_repo,
138 138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
139 139 'repository.write')
140 140 response = self.app.get(route_path(
141 141 'pullrequest_show',
142 142 repo_name=pull_request.target_repo.scm_instance().name,
143 143 pull_request_id=pull_request.pull_request_id))
144 144
145 145 response.mustcontain('Server-side pull request merging is disabled.')
146 146
147 147 assert_response = response.assert_response()
148 148 # now regular user has a merge permissions, we have CLOSE button
149 149 assert_response.one_element_exists('#close-pull-request-action')
150 150
151 151 def test_show_invalid_commit_id(self, pr_util):
152 152 # Simulating invalid revisions which will cause a lookup error
153 153 pull_request = pr_util.create_pull_request()
154 154 pull_request.revisions = ['invalid']
155 155 Session().add(pull_request)
156 156 Session().commit()
157 157
158 158 response = self.app.get(route_path(
159 159 'pullrequest_show',
160 160 repo_name=pull_request.target_repo.scm_instance().name,
161 161 pull_request_id=pull_request.pull_request_id))
162 162
163 163 for commit_id in pull_request.revisions:
164 164 response.mustcontain(commit_id)
165 165
166 166 def test_show_invalid_source_reference(self, pr_util):
167 167 pull_request = pr_util.create_pull_request()
168 168 pull_request.source_ref = 'branch:b:invalid'
169 169 Session().add(pull_request)
170 170 Session().commit()
171 171
172 172 self.app.get(route_path(
173 173 'pullrequest_show',
174 174 repo_name=pull_request.target_repo.scm_instance().name,
175 175 pull_request_id=pull_request.pull_request_id))
176 176
177 177 def test_edit_title_description(self, pr_util, csrf_token):
178 178 pull_request = pr_util.create_pull_request()
179 179 pull_request_id = pull_request.pull_request_id
180 180
181 181 response = self.app.post(
182 182 route_path('pullrequest_update',
183 183 repo_name=pull_request.target_repo.repo_name,
184 184 pull_request_id=pull_request_id),
185 185 params={
186 186 'edit_pull_request': 'true',
187 187 'title': 'New title',
188 188 'description': 'New description',
189 189 'csrf_token': csrf_token})
190 190
191 191 assert_session_flash(
192 192 response, u'Pull request title & description updated.',
193 193 category='success')
194 194
195 195 pull_request = PullRequest.get(pull_request_id)
196 196 assert pull_request.title == 'New title'
197 197 assert pull_request.description == 'New description'
198 198
199 199 def test_edit_title_description_closed(self, pr_util, csrf_token):
200 200 pull_request = pr_util.create_pull_request()
201 201 pull_request_id = pull_request.pull_request_id
202 202 repo_name = pull_request.target_repo.repo_name
203 203 pr_util.close()
204 204
205 205 response = self.app.post(
206 206 route_path('pullrequest_update',
207 207 repo_name=repo_name, pull_request_id=pull_request_id),
208 208 params={
209 209 'edit_pull_request': 'true',
210 210 'title': 'New title',
211 211 'description': 'New description',
212 212 'csrf_token': csrf_token}, status=200)
213 213 assert_session_flash(
214 214 response, u'Cannot update closed pull requests.',
215 215 category='error')
216 216
217 217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
218 218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
219 219
220 220 pull_request = pr_util.create_pull_request()
221 221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
222 222 Session().add(pull_request)
223 223 Session().commit()
224 224
225 225 pull_request_id = pull_request.pull_request_id
226 226
227 227 response = self.app.post(
228 228 route_path('pullrequest_update',
229 229 repo_name=pull_request.target_repo.repo_name,
230 230 pull_request_id=pull_request_id),
231 231 params={'update_commits': 'true',
232 232 'csrf_token': csrf_token})
233 233
234 234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
235 235 UpdateFailureReason.MISSING_SOURCE_REF])
236 236 assert_session_flash(response, expected_msg, category='error')
237 237
238 238 def test_missing_target_reference(self, pr_util, csrf_token):
239 239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
240 240 pull_request = pr_util.create_pull_request(
241 241 approved=True, mergeable=True)
242 242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
243 243 Session().add(pull_request)
244 244 Session().commit()
245 245
246 246 pull_request_id = pull_request.pull_request_id
247 247 pull_request_url = route_path(
248 248 'pullrequest_show',
249 249 repo_name=pull_request.target_repo.repo_name,
250 250 pull_request_id=pull_request_id)
251 251
252 252 response = self.app.get(pull_request_url)
253 253
254 254 assertr = AssertResponse(response)
255 255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
256 256 MergeFailureReason.MISSING_TARGET_REF]
257 257 assertr.element_contains(
258 258 'span[data-role="merge-message"]', str(expected_msg))
259 259
260 260 def test_comment_and_close_pull_request_custom_message_approved(
261 261 self, pr_util, csrf_token, xhr_header):
262 262
263 263 pull_request = pr_util.create_pull_request(approved=True)
264 264 pull_request_id = pull_request.pull_request_id
265 265 author = pull_request.user_id
266 266 repo = pull_request.target_repo.repo_id
267 267
268 268 self.app.post(
269 269 route_path('pullrequest_comment_create',
270 270 repo_name=pull_request.target_repo.scm_instance().name,
271 271 pull_request_id=pull_request_id),
272 272 params={
273 273 'close_pull_request': '1',
274 274 'text': 'Closing a PR',
275 275 'csrf_token': csrf_token},
276 276 extra_environ=xhr_header,)
277 277
278 278 journal = UserLog.query()\
279 279 .filter(UserLog.user_id == author)\
280 280 .filter(UserLog.repository_id == repo) \
281 281 .order_by('user_log_id') \
282 282 .all()
283 283 assert journal[-1].action == 'repo.pull_request.close'
284 284
285 285 pull_request = PullRequest.get(pull_request_id)
286 286 assert pull_request.is_closed()
287 287
288 288 status = ChangesetStatusModel().get_status(
289 289 pull_request.source_repo, pull_request=pull_request)
290 290 assert status == ChangesetStatus.STATUS_APPROVED
291 291 comments = ChangesetComment().query() \
292 292 .filter(ChangesetComment.pull_request == pull_request) \
293 293 .order_by(ChangesetComment.comment_id.asc())\
294 294 .all()
295 295 assert comments[-1].text == 'Closing a PR'
296 296
297 297 def test_comment_force_close_pull_request_rejected(
298 298 self, pr_util, csrf_token, xhr_header):
299 299 pull_request = pr_util.create_pull_request()
300 300 pull_request_id = pull_request.pull_request_id
301 301 PullRequestModel().update_reviewers(
302 pull_request_id, [(1, ['reason'], False), (2, ['reason2'], False)],
302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
303 303 pull_request.author)
304 304 author = pull_request.user_id
305 305 repo = pull_request.target_repo.repo_id
306 306
307 307 self.app.post(
308 308 route_path('pullrequest_comment_create',
309 309 repo_name=pull_request.target_repo.scm_instance().name,
310 310 pull_request_id=pull_request_id),
311 311 params={
312 312 'close_pull_request': '1',
313 313 'csrf_token': csrf_token},
314 314 extra_environ=xhr_header)
315 315
316 316 pull_request = PullRequest.get(pull_request_id)
317 317
318 318 journal = UserLog.query()\
319 319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
320 320 .order_by('user_log_id') \
321 321 .all()
322 322 assert journal[-1].action == 'repo.pull_request.close'
323 323
324 324 # check only the latest status, not the review status
325 325 status = ChangesetStatusModel().get_status(
326 326 pull_request.source_repo, pull_request=pull_request)
327 327 assert status == ChangesetStatus.STATUS_REJECTED
328 328
329 329 def test_comment_and_close_pull_request(
330 330 self, pr_util, csrf_token, xhr_header):
331 331 pull_request = pr_util.create_pull_request()
332 332 pull_request_id = pull_request.pull_request_id
333 333
334 334 response = self.app.post(
335 335 route_path('pullrequest_comment_create',
336 336 repo_name=pull_request.target_repo.scm_instance().name,
337 337 pull_request_id=pull_request.pull_request_id),
338 338 params={
339 339 'close_pull_request': 'true',
340 340 'csrf_token': csrf_token},
341 341 extra_environ=xhr_header)
342 342
343 343 assert response.json
344 344
345 345 pull_request = PullRequest.get(pull_request_id)
346 346 assert pull_request.is_closed()
347 347
348 348 # check only the latest status, not the review status
349 349 status = ChangesetStatusModel().get_status(
350 350 pull_request.source_repo, pull_request=pull_request)
351 351 assert status == ChangesetStatus.STATUS_REJECTED
352 352
353 353 def test_create_pull_request(self, backend, csrf_token):
354 354 commits = [
355 355 {'message': 'ancestor'},
356 356 {'message': 'change'},
357 357 {'message': 'change2'},
358 358 ]
359 359 commit_ids = backend.create_master_repo(commits)
360 360 target = backend.create_repo(heads=['ancestor'])
361 361 source = backend.create_repo(heads=['change2'])
362 362
363 363 response = self.app.post(
364 364 route_path('pullrequest_create', repo_name=source.repo_name),
365 365 [
366 366 ('source_repo', source.repo_name),
367 367 ('source_ref', 'branch:default:' + commit_ids['change2']),
368 368 ('target_repo', target.repo_name),
369 369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
370 370 ('common_ancestor', commit_ids['ancestor']),
371 371 ('pullrequest_desc', 'Description'),
372 372 ('pullrequest_title', 'Title'),
373 373 ('__start__', 'review_members:sequence'),
374 374 ('__start__', 'reviewer:mapping'),
375 375 ('user_id', '1'),
376 376 ('__start__', 'reasons:sequence'),
377 377 ('reason', 'Some reason'),
378 378 ('__end__', 'reasons:sequence'),
379 ('__start__', 'rules:sequence'),
380 ('__end__', 'rules:sequence'),
379 381 ('mandatory', 'False'),
380 382 ('__end__', 'reviewer:mapping'),
381 383 ('__end__', 'review_members:sequence'),
382 384 ('__start__', 'revisions:sequence'),
383 385 ('revisions', commit_ids['change']),
384 386 ('revisions', commit_ids['change2']),
385 387 ('__end__', 'revisions:sequence'),
386 388 ('user', ''),
387 389 ('csrf_token', csrf_token),
388 390 ],
389 391 status=302)
390 392
391 393 location = response.headers['Location']
392 394 pull_request_id = location.rsplit('/', 1)[1]
393 395 assert pull_request_id != 'new'
394 396 pull_request = PullRequest.get(int(pull_request_id))
395 397
396 398 # check that we have now both revisions
397 399 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
398 400 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
399 401 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
400 402 assert pull_request.target_ref == expected_target_ref
401 403
402 404 def test_reviewer_notifications(self, backend, csrf_token):
403 405 # We have to use the app.post for this test so it will create the
404 406 # notifications properly with the new PR
405 407 commits = [
406 408 {'message': 'ancestor',
407 409 'added': [FileNode('file_A', content='content_of_ancestor')]},
408 410 {'message': 'change',
409 411 'added': [FileNode('file_a', content='content_of_change')]},
410 412 {'message': 'change-child'},
411 413 {'message': 'ancestor-child', 'parents': ['ancestor'],
412 414 'added': [
413 415 FileNode('file_B', content='content_of_ancestor_child')]},
414 416 {'message': 'ancestor-child-2'},
415 417 ]
416 418 commit_ids = backend.create_master_repo(commits)
417 419 target = backend.create_repo(heads=['ancestor-child'])
418 420 source = backend.create_repo(heads=['change'])
419 421
420 422 response = self.app.post(
421 423 route_path('pullrequest_create', repo_name=source.repo_name),
422 424 [
423 425 ('source_repo', source.repo_name),
424 426 ('source_ref', 'branch:default:' + commit_ids['change']),
425 427 ('target_repo', target.repo_name),
426 428 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
427 429 ('common_ancestor', commit_ids['ancestor']),
428 430 ('pullrequest_desc', 'Description'),
429 431 ('pullrequest_title', 'Title'),
430 432 ('__start__', 'review_members:sequence'),
431 433 ('__start__', 'reviewer:mapping'),
432 434 ('user_id', '2'),
433 435 ('__start__', 'reasons:sequence'),
434 436 ('reason', 'Some reason'),
435 437 ('__end__', 'reasons:sequence'),
438 ('__start__', 'rules:sequence'),
439 ('__end__', 'rules:sequence'),
436 440 ('mandatory', 'False'),
437 441 ('__end__', 'reviewer:mapping'),
438 442 ('__end__', 'review_members:sequence'),
439 443 ('__start__', 'revisions:sequence'),
440 444 ('revisions', commit_ids['change']),
441 445 ('__end__', 'revisions:sequence'),
442 446 ('user', ''),
443 447 ('csrf_token', csrf_token),
444 448 ],
445 449 status=302)
446 450
447 451 location = response.headers['Location']
448 452
449 453 pull_request_id = location.rsplit('/', 1)[1]
450 454 assert pull_request_id != 'new'
451 455 pull_request = PullRequest.get(int(pull_request_id))
452 456
453 457 # Check that a notification was made
454 458 notifications = Notification.query()\
455 459 .filter(Notification.created_by == pull_request.author.user_id,
456 460 Notification.type_ == Notification.TYPE_PULL_REQUEST,
457 461 Notification.subject.contains(
458 462 "wants you to review pull request #%s" % pull_request_id))
459 463 assert len(notifications.all()) == 1
460 464
461 465 # Change reviewers and check that a notification was made
462 466 PullRequestModel().update_reviewers(
463 pull_request.pull_request_id, [(1, [], False)],
467 pull_request.pull_request_id, [(1, [], False, [])],
464 468 pull_request.author)
465 469 assert len(notifications.all()) == 2
466 470
467 471 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
468 472 csrf_token):
469 473 commits = [
470 474 {'message': 'ancestor',
471 475 'added': [FileNode('file_A', content='content_of_ancestor')]},
472 476 {'message': 'change',
473 477 'added': [FileNode('file_a', content='content_of_change')]},
474 478 {'message': 'change-child'},
475 479 {'message': 'ancestor-child', 'parents': ['ancestor'],
476 480 'added': [
477 481 FileNode('file_B', content='content_of_ancestor_child')]},
478 482 {'message': 'ancestor-child-2'},
479 483 ]
480 484 commit_ids = backend.create_master_repo(commits)
481 485 target = backend.create_repo(heads=['ancestor-child'])
482 486 source = backend.create_repo(heads=['change'])
483 487
484 488 response = self.app.post(
485 489 route_path('pullrequest_create', repo_name=source.repo_name),
486 490 [
487 491 ('source_repo', source.repo_name),
488 492 ('source_ref', 'branch:default:' + commit_ids['change']),
489 493 ('target_repo', target.repo_name),
490 494 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
491 495 ('common_ancestor', commit_ids['ancestor']),
492 496 ('pullrequest_desc', 'Description'),
493 497 ('pullrequest_title', 'Title'),
494 498 ('__start__', 'review_members:sequence'),
495 499 ('__start__', 'reviewer:mapping'),
496 500 ('user_id', '1'),
497 501 ('__start__', 'reasons:sequence'),
498 502 ('reason', 'Some reason'),
499 503 ('__end__', 'reasons:sequence'),
504 ('__start__', 'rules:sequence'),
505 ('__end__', 'rules:sequence'),
500 506 ('mandatory', 'False'),
501 507 ('__end__', 'reviewer:mapping'),
502 508 ('__end__', 'review_members:sequence'),
503 509 ('__start__', 'revisions:sequence'),
504 510 ('revisions', commit_ids['change']),
505 511 ('__end__', 'revisions:sequence'),
506 512 ('user', ''),
507 513 ('csrf_token', csrf_token),
508 514 ],
509 515 status=302)
510 516
511 517 location = response.headers['Location']
512 518
513 519 pull_request_id = location.rsplit('/', 1)[1]
514 520 assert pull_request_id != 'new'
515 521 pull_request = PullRequest.get(int(pull_request_id))
516 522
517 523 # target_ref has to point to the ancestor's commit_id in order to
518 524 # show the correct diff
519 525 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
520 526 assert pull_request.target_ref == expected_target_ref
521 527
522 528 # Check generated diff contents
523 529 response = response.follow()
524 530 assert 'content_of_ancestor' not in response.body
525 531 assert 'content_of_ancestor-child' not in response.body
526 532 assert 'content_of_change' in response.body
527 533
528 534 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
529 535 # Clear any previous calls to rcextensions
530 536 rhodecode.EXTENSIONS.calls.clear()
531 537
532 538 pull_request = pr_util.create_pull_request(
533 539 approved=True, mergeable=True)
534 540 pull_request_id = pull_request.pull_request_id
535 541 repo_name = pull_request.target_repo.scm_instance().name,
536 542
537 543 response = self.app.post(
538 544 route_path('pullrequest_merge',
539 545 repo_name=str(repo_name[0]),
540 546 pull_request_id=pull_request_id),
541 547 params={'csrf_token': csrf_token}).follow()
542 548
543 549 pull_request = PullRequest.get(pull_request_id)
544 550
545 551 assert response.status_int == 200
546 552 assert pull_request.is_closed()
547 553 assert_pull_request_status(
548 554 pull_request, ChangesetStatus.STATUS_APPROVED)
549 555
550 556 # Check the relevant log entries were added
551 557 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
552 558 actions = [log.action for log in user_logs]
553 559 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
554 560 expected_actions = [
555 561 u'repo.pull_request.close',
556 562 u'repo.pull_request.merge',
557 563 u'repo.pull_request.comment.create'
558 564 ]
559 565 assert actions == expected_actions
560 566
561 567 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
562 568 actions = [log for log in user_logs]
563 569 assert actions[-1].action == 'user.push'
564 570 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
565 571
566 572 # Check post_push rcextension was really executed
567 573 push_calls = rhodecode.EXTENSIONS.calls['post_push']
568 574 assert len(push_calls) == 1
569 575 unused_last_call_args, last_call_kwargs = push_calls[0]
570 576 assert last_call_kwargs['action'] == 'push'
571 577 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
572 578
573 579 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
574 580 pull_request = pr_util.create_pull_request(mergeable=False)
575 581 pull_request_id = pull_request.pull_request_id
576 582 pull_request = PullRequest.get(pull_request_id)
577 583
578 584 response = self.app.post(
579 585 route_path('pullrequest_merge',
580 586 repo_name=pull_request.target_repo.scm_instance().name,
581 587 pull_request_id=pull_request.pull_request_id),
582 588 params={'csrf_token': csrf_token}).follow()
583 589
584 590 assert response.status_int == 200
585 591 response.mustcontain(
586 592 'Merge is not currently possible because of below failed checks.')
587 593 response.mustcontain('Server-side pull request merging is disabled.')
588 594
589 595 @pytest.mark.skip_backends('svn')
590 596 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
591 597 pull_request = pr_util.create_pull_request(mergeable=True)
592 598 pull_request_id = pull_request.pull_request_id
593 599 repo_name = pull_request.target_repo.scm_instance().name
594 600
595 601 response = self.app.post(
596 602 route_path('pullrequest_merge',
597 603 repo_name=repo_name,
598 604 pull_request_id=pull_request_id),
599 605 params={'csrf_token': csrf_token}).follow()
600 606
601 607 assert response.status_int == 200
602 608
603 609 response.mustcontain(
604 610 'Merge is not currently possible because of below failed checks.')
605 611 response.mustcontain('Pull request reviewer approval is pending.')
606 612
607 613 def test_merge_pull_request_renders_failure_reason(
608 614 self, user_regular, csrf_token, pr_util):
609 615 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
610 616 pull_request_id = pull_request.pull_request_id
611 617 repo_name = pull_request.target_repo.scm_instance().name
612 618
613 619 model_patcher = mock.patch.multiple(
614 620 PullRequestModel,
615 621 merge=mock.Mock(return_value=MergeResponse(
616 622 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
617 623 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
618 624
619 625 with model_patcher:
620 626 response = self.app.post(
621 627 route_path('pullrequest_merge',
622 628 repo_name=repo_name,
623 629 pull_request_id=pull_request_id),
624 630 params={'csrf_token': csrf_token}, status=302)
625 631
626 632 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
627 633 MergeFailureReason.PUSH_FAILED])
628 634
629 635 def test_update_source_revision(self, backend, csrf_token):
630 636 commits = [
631 637 {'message': 'ancestor'},
632 638 {'message': 'change'},
633 639 {'message': 'change-2'},
634 640 ]
635 641 commit_ids = backend.create_master_repo(commits)
636 642 target = backend.create_repo(heads=['ancestor'])
637 643 source = backend.create_repo(heads=['change'])
638 644
639 645 # create pr from a in source to A in target
640 646 pull_request = PullRequest()
641 647 pull_request.source_repo = source
642 648 # TODO: johbo: Make sure that we write the source ref this way!
643 649 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
644 650 branch=backend.default_branch_name, commit_id=commit_ids['change'])
645 651 pull_request.target_repo = target
646 652
647 653 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
648 654 branch=backend.default_branch_name,
649 655 commit_id=commit_ids['ancestor'])
650 656 pull_request.revisions = [commit_ids['change']]
651 657 pull_request.title = u"Test"
652 658 pull_request.description = u"Description"
653 659 pull_request.author = UserModel().get_by_username(
654 660 TEST_USER_ADMIN_LOGIN)
655 661 Session().add(pull_request)
656 662 Session().commit()
657 663 pull_request_id = pull_request.pull_request_id
658 664
659 665 # source has ancestor - change - change-2
660 666 backend.pull_heads(source, heads=['change-2'])
661 667
662 668 # update PR
663 669 self.app.post(
664 670 route_path('pullrequest_update',
665 671 repo_name=target.repo_name,
666 672 pull_request_id=pull_request_id),
667 673 params={'update_commits': 'true',
668 674 'csrf_token': csrf_token})
669 675
670 676 # check that we now have both revisions
671 677 pull_request = PullRequest.get(pull_request_id)
672 678 assert pull_request.revisions == [
673 679 commit_ids['change-2'], commit_ids['change']]
674 680
675 681 # TODO: johbo: this should be a test on its own
676 682 response = self.app.get(route_path(
677 683 'pullrequest_new',
678 684 repo_name=target.repo_name))
679 685 assert response.status_int == 200
680 686 assert 'Pull request updated to' in response.body
681 687 assert 'with 1 added, 0 removed commits.' in response.body
682 688
683 689 def test_update_target_revision(self, backend, csrf_token):
684 690 commits = [
685 691 {'message': 'ancestor'},
686 692 {'message': 'change'},
687 693 {'message': 'ancestor-new', 'parents': ['ancestor']},
688 694 {'message': 'change-rebased'},
689 695 ]
690 696 commit_ids = backend.create_master_repo(commits)
691 697 target = backend.create_repo(heads=['ancestor'])
692 698 source = backend.create_repo(heads=['change'])
693 699
694 700 # create pr from a in source to A in target
695 701 pull_request = PullRequest()
696 702 pull_request.source_repo = source
697 703 # TODO: johbo: Make sure that we write the source ref this way!
698 704 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
699 705 branch=backend.default_branch_name, commit_id=commit_ids['change'])
700 706 pull_request.target_repo = target
701 707 # TODO: johbo: Target ref should be branch based, since tip can jump
702 708 # from branch to branch
703 709 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
704 710 branch=backend.default_branch_name,
705 711 commit_id=commit_ids['ancestor'])
706 712 pull_request.revisions = [commit_ids['change']]
707 713 pull_request.title = u"Test"
708 714 pull_request.description = u"Description"
709 715 pull_request.author = UserModel().get_by_username(
710 716 TEST_USER_ADMIN_LOGIN)
711 717 Session().add(pull_request)
712 718 Session().commit()
713 719 pull_request_id = pull_request.pull_request_id
714 720
715 721 # target has ancestor - ancestor-new
716 722 # source has ancestor - ancestor-new - change-rebased
717 723 backend.pull_heads(target, heads=['ancestor-new'])
718 724 backend.pull_heads(source, heads=['change-rebased'])
719 725
720 726 # update PR
721 727 self.app.post(
722 728 route_path('pullrequest_update',
723 729 repo_name=target.repo_name,
724 730 pull_request_id=pull_request_id),
725 731 params={'update_commits': 'true',
726 732 'csrf_token': csrf_token},
727 733 status=200)
728 734
729 735 # check that the revisions were replaced by the rebased commit
730 736 pull_request = PullRequest.get(pull_request_id)
731 737 assert pull_request.revisions == [commit_ids['change-rebased']]
732 738 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
733 739 branch=backend.default_branch_name,
734 740 commit_id=commit_ids['ancestor-new'])
735 741
736 742 # TODO: johbo: This should be a test on its own
737 743 response = self.app.get(route_path(
738 744 'pullrequest_new',
739 745 repo_name=target.repo_name))
740 746 assert response.status_int == 200
741 747 assert 'Pull request updated to' in response.body
742 748 assert 'with 1 added, 1 removed commits.' in response.body
743 749
744 750 def test_update_of_ancestor_reference(self, backend, csrf_token):
745 751 commits = [
746 752 {'message': 'ancestor'},
747 753 {'message': 'change'},
748 754 {'message': 'change-2'},
749 755 {'message': 'ancestor-new', 'parents': ['ancestor']},
750 756 {'message': 'change-rebased'},
751 757 ]
752 758 commit_ids = backend.create_master_repo(commits)
753 759 target = backend.create_repo(heads=['ancestor'])
754 760 source = backend.create_repo(heads=['change'])
755 761
756 762 # create pr from a in source to A in target
757 763 pull_request = PullRequest()
758 764 pull_request.source_repo = source
759 765 # TODO: johbo: Make sure that we write the source ref this way!
760 766 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
761 767 branch=backend.default_branch_name,
762 768 commit_id=commit_ids['change'])
763 769 pull_request.target_repo = target
764 770 # TODO: johbo: Target ref should be branch based, since tip can jump
765 771 # from branch to branch
766 772 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
767 773 branch=backend.default_branch_name,
768 774 commit_id=commit_ids['ancestor'])
769 775 pull_request.revisions = [commit_ids['change']]
770 776 pull_request.title = u"Test"
771 777 pull_request.description = u"Description"
772 778 pull_request.author = UserModel().get_by_username(
773 779 TEST_USER_ADMIN_LOGIN)
774 780 Session().add(pull_request)
775 781 Session().commit()
776 782 pull_request_id = pull_request.pull_request_id
777 783
778 784 # target has ancestor - ancestor-new
779 785 # source has ancestor - ancestor-new - change-rebased
780 786 backend.pull_heads(target, heads=['ancestor-new'])
781 787 backend.pull_heads(source, heads=['change-rebased'])
782 788
783 789 # update PR
784 790 self.app.post(
785 791 route_path('pullrequest_update',
786 792 repo_name=target.repo_name,
787 793 pull_request_id=pull_request_id),
788 794 params={'update_commits': 'true',
789 795 'csrf_token': csrf_token},
790 796 status=200)
791 797
792 798 # Expect the target reference to be updated correctly
793 799 pull_request = PullRequest.get(pull_request_id)
794 800 assert pull_request.revisions == [commit_ids['change-rebased']]
795 801 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
796 802 branch=backend.default_branch_name,
797 803 commit_id=commit_ids['ancestor-new'])
798 804 assert pull_request.target_ref == expected_target_ref
799 805
800 806 def test_remove_pull_request_branch(self, backend_git, csrf_token):
801 807 branch_name = 'development'
802 808 commits = [
803 809 {'message': 'initial-commit'},
804 810 {'message': 'old-feature'},
805 811 {'message': 'new-feature', 'branch': branch_name},
806 812 ]
807 813 repo = backend_git.create_repo(commits)
808 814 commit_ids = backend_git.commit_ids
809 815
810 816 pull_request = PullRequest()
811 817 pull_request.source_repo = repo
812 818 pull_request.target_repo = repo
813 819 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
814 820 branch=branch_name, commit_id=commit_ids['new-feature'])
815 821 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
816 822 branch=backend_git.default_branch_name,
817 823 commit_id=commit_ids['old-feature'])
818 824 pull_request.revisions = [commit_ids['new-feature']]
819 825 pull_request.title = u"Test"
820 826 pull_request.description = u"Description"
821 827 pull_request.author = UserModel().get_by_username(
822 828 TEST_USER_ADMIN_LOGIN)
823 829 Session().add(pull_request)
824 830 Session().commit()
825 831
826 832 vcs = repo.scm_instance()
827 833 vcs.remove_ref('refs/heads/{}'.format(branch_name))
828 834
829 835 response = self.app.get(route_path(
830 836 'pullrequest_show',
831 837 repo_name=repo.repo_name,
832 838 pull_request_id=pull_request.pull_request_id))
833 839
834 840 assert response.status_int == 200
835 841 assert_response = AssertResponse(response)
836 842 assert_response.element_contains(
837 843 '#changeset_compare_view_content .alert strong',
838 844 'Missing commits')
839 845 assert_response.element_contains(
840 846 '#changeset_compare_view_content .alert',
841 847 'This pull request cannot be displayed, because one or more'
842 848 ' commits no longer exist in the source repository.')
843 849
844 850 def test_strip_commits_from_pull_request(
845 851 self, backend, pr_util, csrf_token):
846 852 commits = [
847 853 {'message': 'initial-commit'},
848 854 {'message': 'old-feature'},
849 855 {'message': 'new-feature', 'parents': ['initial-commit']},
850 856 ]
851 857 pull_request = pr_util.create_pull_request(
852 858 commits, target_head='initial-commit', source_head='new-feature',
853 859 revisions=['new-feature'])
854 860
855 861 vcs = pr_util.source_repository.scm_instance()
856 862 if backend.alias == 'git':
857 863 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
858 864 else:
859 865 vcs.strip(pr_util.commit_ids['new-feature'])
860 866
861 867 response = self.app.get(route_path(
862 868 'pullrequest_show',
863 869 repo_name=pr_util.target_repository.repo_name,
864 870 pull_request_id=pull_request.pull_request_id))
865 871
866 872 assert response.status_int == 200
867 873 assert_response = AssertResponse(response)
868 874 assert_response.element_contains(
869 875 '#changeset_compare_view_content .alert strong',
870 876 'Missing commits')
871 877 assert_response.element_contains(
872 878 '#changeset_compare_view_content .alert',
873 879 'This pull request cannot be displayed, because one or more'
874 880 ' commits no longer exist in the source repository.')
875 881 assert_response.element_contains(
876 882 '#update_commits',
877 883 'Update commits')
878 884
879 885 def test_strip_commits_and_update(
880 886 self, backend, pr_util, csrf_token):
881 887 commits = [
882 888 {'message': 'initial-commit'},
883 889 {'message': 'old-feature'},
884 890 {'message': 'new-feature', 'parents': ['old-feature']},
885 891 ]
886 892 pull_request = pr_util.create_pull_request(
887 893 commits, target_head='old-feature', source_head='new-feature',
888 894 revisions=['new-feature'], mergeable=True)
889 895
890 896 vcs = pr_util.source_repository.scm_instance()
891 897 if backend.alias == 'git':
892 898 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
893 899 else:
894 900 vcs.strip(pr_util.commit_ids['new-feature'])
895 901
896 902 response = self.app.post(
897 903 route_path('pullrequest_update',
898 904 repo_name=pull_request.target_repo.repo_name,
899 905 pull_request_id=pull_request.pull_request_id),
900 906 params={'update_commits': 'true',
901 907 'csrf_token': csrf_token})
902 908
903 909 assert response.status_int == 200
904 910 assert response.body == 'true'
905 911
906 912 # Make sure that after update, it won't raise 500 errors
907 913 response = self.app.get(route_path(
908 914 'pullrequest_show',
909 915 repo_name=pr_util.target_repository.repo_name,
910 916 pull_request_id=pull_request.pull_request_id))
911 917
912 918 assert response.status_int == 200
913 919 assert_response = AssertResponse(response)
914 920 assert_response.element_contains(
915 921 '#changeset_compare_view_content .alert strong',
916 922 'Missing commits')
917 923
918 924 def test_branch_is_a_link(self, pr_util):
919 925 pull_request = pr_util.create_pull_request()
920 926 pull_request.source_ref = 'branch:origin:1234567890abcdef'
921 927 pull_request.target_ref = 'branch:target:abcdef1234567890'
922 928 Session().add(pull_request)
923 929 Session().commit()
924 930
925 931 response = self.app.get(route_path(
926 932 'pullrequest_show',
927 933 repo_name=pull_request.target_repo.scm_instance().name,
928 934 pull_request_id=pull_request.pull_request_id))
929 935 assert response.status_int == 200
930 936 assert_response = AssertResponse(response)
931 937
932 938 origin = assert_response.get_element('.pr-origininfo .tag')
933 939 origin_children = origin.getchildren()
934 940 assert len(origin_children) == 1
935 941 target = assert_response.get_element('.pr-targetinfo .tag')
936 942 target_children = target.getchildren()
937 943 assert len(target_children) == 1
938 944
939 945 expected_origin_link = route_path(
940 946 'repo_changelog',
941 947 repo_name=pull_request.source_repo.scm_instance().name,
942 948 params=dict(branch='origin'))
943 949 expected_target_link = route_path(
944 950 'repo_changelog',
945 951 repo_name=pull_request.target_repo.scm_instance().name,
946 952 params=dict(branch='target'))
947 953 assert origin_children[0].attrib['href'] == expected_origin_link
948 954 assert origin_children[0].text == 'branch: origin'
949 955 assert target_children[0].attrib['href'] == expected_target_link
950 956 assert target_children[0].text == 'branch: target'
951 957
952 958 def test_bookmark_is_not_a_link(self, pr_util):
953 959 pull_request = pr_util.create_pull_request()
954 960 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
955 961 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
956 962 Session().add(pull_request)
957 963 Session().commit()
958 964
959 965 response = self.app.get(route_path(
960 966 'pullrequest_show',
961 967 repo_name=pull_request.target_repo.scm_instance().name,
962 968 pull_request_id=pull_request.pull_request_id))
963 969 assert response.status_int == 200
964 970 assert_response = AssertResponse(response)
965 971
966 972 origin = assert_response.get_element('.pr-origininfo .tag')
967 973 assert origin.text.strip() == 'bookmark: origin'
968 974 assert origin.getchildren() == []
969 975
970 976 target = assert_response.get_element('.pr-targetinfo .tag')
971 977 assert target.text.strip() == 'bookmark: target'
972 978 assert target.getchildren() == []
973 979
974 980 def test_tag_is_not_a_link(self, pr_util):
975 981 pull_request = pr_util.create_pull_request()
976 982 pull_request.source_ref = 'tag:origin:1234567890abcdef'
977 983 pull_request.target_ref = 'tag:target:abcdef1234567890'
978 984 Session().add(pull_request)
979 985 Session().commit()
980 986
981 987 response = self.app.get(route_path(
982 988 'pullrequest_show',
983 989 repo_name=pull_request.target_repo.scm_instance().name,
984 990 pull_request_id=pull_request.pull_request_id))
985 991 assert response.status_int == 200
986 992 assert_response = AssertResponse(response)
987 993
988 994 origin = assert_response.get_element('.pr-origininfo .tag')
989 995 assert origin.text.strip() == 'tag: origin'
990 996 assert origin.getchildren() == []
991 997
992 998 target = assert_response.get_element('.pr-targetinfo .tag')
993 999 assert target.text.strip() == 'tag: target'
994 1000 assert target.getchildren() == []
995 1001
996 1002 @pytest.mark.parametrize('mergeable', [True, False])
997 1003 def test_shadow_repository_link(
998 1004 self, mergeable, pr_util, http_host_only_stub):
999 1005 """
1000 1006 Check that the pull request summary page displays a link to the shadow
1001 1007 repository if the pull request is mergeable. If it is not mergeable
1002 1008 the link should not be displayed.
1003 1009 """
1004 1010 pull_request = pr_util.create_pull_request(
1005 1011 mergeable=mergeable, enable_notifications=False)
1006 1012 target_repo = pull_request.target_repo.scm_instance()
1007 1013 pr_id = pull_request.pull_request_id
1008 1014 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1009 1015 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1010 1016
1011 1017 response = self.app.get(route_path(
1012 1018 'pullrequest_show',
1013 1019 repo_name=target_repo.name,
1014 1020 pull_request_id=pr_id))
1015 1021
1016 1022 assertr = AssertResponse(response)
1017 1023 if mergeable:
1018 1024 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1019 1025 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1020 1026 else:
1021 1027 assertr.no_element_exists('.pr-mergeinfo')
1022 1028
1023 1029
1024 1030 @pytest.mark.usefixtures('app')
1025 1031 @pytest.mark.backends("git", "hg")
1026 1032 class TestPullrequestsControllerDelete(object):
1027 1033 def test_pull_request_delete_button_permissions_admin(
1028 1034 self, autologin_user, user_admin, pr_util):
1029 1035 pull_request = pr_util.create_pull_request(
1030 1036 author=user_admin.username, enable_notifications=False)
1031 1037
1032 1038 response = self.app.get(route_path(
1033 1039 'pullrequest_show',
1034 1040 repo_name=pull_request.target_repo.scm_instance().name,
1035 1041 pull_request_id=pull_request.pull_request_id))
1036 1042
1037 1043 response.mustcontain('id="delete_pullrequest"')
1038 1044 response.mustcontain('Confirm to delete this pull request')
1039 1045
1040 1046 def test_pull_request_delete_button_permissions_owner(
1041 1047 self, autologin_regular_user, user_regular, pr_util):
1042 1048 pull_request = pr_util.create_pull_request(
1043 1049 author=user_regular.username, enable_notifications=False)
1044 1050
1045 1051 response = self.app.get(route_path(
1046 1052 'pullrequest_show',
1047 1053 repo_name=pull_request.target_repo.scm_instance().name,
1048 1054 pull_request_id=pull_request.pull_request_id))
1049 1055
1050 1056 response.mustcontain('id="delete_pullrequest"')
1051 1057 response.mustcontain('Confirm to delete this pull request')
1052 1058
1053 1059 def test_pull_request_delete_button_permissions_forbidden(
1054 1060 self, autologin_regular_user, user_regular, user_admin, pr_util):
1055 1061 pull_request = pr_util.create_pull_request(
1056 1062 author=user_admin.username, enable_notifications=False)
1057 1063
1058 1064 response = self.app.get(route_path(
1059 1065 'pullrequest_show',
1060 1066 repo_name=pull_request.target_repo.scm_instance().name,
1061 1067 pull_request_id=pull_request.pull_request_id))
1062 1068 response.mustcontain(no=['id="delete_pullrequest"'])
1063 1069 response.mustcontain(no=['Confirm to delete this pull request'])
1064 1070
1065 1071 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1066 1072 self, autologin_regular_user, user_regular, user_admin, pr_util,
1067 1073 user_util):
1068 1074
1069 1075 pull_request = pr_util.create_pull_request(
1070 1076 author=user_admin.username, enable_notifications=False)
1071 1077
1072 1078 user_util.grant_user_permission_to_repo(
1073 1079 pull_request.target_repo, user_regular,
1074 1080 'repository.write')
1075 1081
1076 1082 response = self.app.get(route_path(
1077 1083 'pullrequest_show',
1078 1084 repo_name=pull_request.target_repo.scm_instance().name,
1079 1085 pull_request_id=pull_request.pull_request_id))
1080 1086
1081 1087 response.mustcontain('id="open_edit_pullrequest"')
1082 1088 response.mustcontain('id="delete_pullrequest"')
1083 1089 response.mustcontain(no=['Confirm to delete this pull request'])
1084 1090
1085 1091 def test_delete_comment_returns_404_if_comment_does_not_exist(
1086 1092 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1087 1093
1088 1094 pull_request = pr_util.create_pull_request(
1089 1095 author=user_admin.username, enable_notifications=False)
1090 1096
1091 1097 self.app.post(
1092 1098 route_path(
1093 1099 'pullrequest_comment_delete',
1094 1100 repo_name=pull_request.target_repo.scm_instance().name,
1095 1101 pull_request_id=pull_request.pull_request_id,
1096 1102 comment_id=1024404),
1097 1103 extra_environ=xhr_header,
1098 1104 params={'csrf_token': csrf_token},
1099 1105 status=404
1100 1106 )
1101 1107
1102 1108 def test_delete_comment(
1103 1109 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1104 1110
1105 1111 pull_request = pr_util.create_pull_request(
1106 1112 author=user_admin.username, enable_notifications=False)
1107 1113 comment = pr_util.create_comment()
1108 1114 comment_id = comment.comment_id
1109 1115
1110 1116 response = self.app.post(
1111 1117 route_path(
1112 1118 'pullrequest_comment_delete',
1113 1119 repo_name=pull_request.target_repo.scm_instance().name,
1114 1120 pull_request_id=pull_request.pull_request_id,
1115 1121 comment_id=comment_id),
1116 1122 extra_environ=xhr_header,
1117 1123 params={'csrf_token': csrf_token},
1118 1124 status=200
1119 1125 )
1120 1126 assert response.body == 'true'
1121 1127
1122 1128
1123 1129 def assert_pull_request_status(pull_request, expected_status):
1124 1130 status = ChangesetStatusModel().calculated_review_status(
1125 1131 pull_request=pull_request)
1126 1132 assert status == expected_status
1127 1133
1128 1134
1129 1135 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1130 1136 @pytest.mark.usefixtures("autologin_user")
1131 1137 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1132 1138 response = app.get(
1133 1139 route_path(route, repo_name=backend_svn.repo_name), status=404)
1134 1140
@@ -1,76 +1,79 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 from rhodecode.lib import helpers as h
22 22 from rhodecode.lib.utils2 import safe_int
23 23
24 24
25 def reviewer_as_json(user, reasons=None, mandatory=False):
25 def reviewer_as_json(user, reasons=None, mandatory=False, rules=None, user_group=None):
26 26 """
27 27 Returns json struct of a reviewer for frontend
28 28
29 29 :param user: the reviewer
30 30 :param reasons: list of strings of why they are reviewers
31 31 :param mandatory: bool, to set user as mandatory
32 32 """
33 33
34 34 return {
35 35 'user_id': user.user_id,
36 36 'reasons': reasons or [],
37 'rules': rules or [],
37 38 'mandatory': mandatory,
39 'user_group': user_group,
38 40 'username': user.username,
39 41 'first_name': user.first_name,
40 42 'last_name': user.last_name,
43 'user_link': h.link_to_user(user),
41 44 'gravatar_link': h.gravatar_url(user.email, 14),
42 45 }
43 46
44 47
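# A minimal sketch of the structure produced by reviewer_as_json() above, with
# made-up values; the 'rules' and 'user_group' keys are the fields introduced
# by this change's voting-rule logic:
#
# reviewer_as_json(user=some_user, reasons=['Default reviewer'],
# mandatory=True, rules=[1], user_group=None)
# => {'user_id': 2, 'reasons': ['Default reviewer'], 'rules': [1],
# 'mandatory': True, 'user_group': None, 'username': 'jane',
# 'first_name': 'Jane', 'last_name': 'Doe',
# 'user_link': '<a href="...">jane</a>', 'gravatar_link': '...'}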
45 48 def get_default_reviewers_data(
46 49 current_user, source_repo, source_commit, target_repo, target_commit):
47 50
48 51 """ Return json for default reviewers of a repository """
49 52
50 53 reasons = ['Default reviewer', 'Repository owner']
51 54 default = reviewer_as_json(
52 55 user=current_user, reasons=reasons, mandatory=False)
53 56
54 57 return {
55 58 'api_ver': 'v1', # define version for later possible schema upgrade
56 59 'reviewers': [default],
57 60 'rules': {},
58 61 'rules_data': {},
59 62 }
60 63
61 64
62 65 def validate_default_reviewers(review_members, reviewer_rules):
63 66 """
64 67 Function to validate submitted reviewers against the saved rules
65 68
66 69 """
67 70 reviewers = []
68 71 reviewer_by_id = {}
69 72 for r in review_members:
70 73 reviewer_user_id = safe_int(r['user_id'])
71 entry = (reviewer_user_id, r['reasons'], r['mandatory'])
74 entry = (reviewer_user_id, r['reasons'], r['mandatory'], r['rules'])
72 75
73 76 reviewer_by_id[reviewer_user_id] = entry
74 77 reviewers.append(entry)
75 78
76 79 return reviewers
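# A minimal usage sketch with made-up input; each submitted reviewer dict is
# turned into a (user_id, reasons, mandatory, rules) tuple, the last element
# being new in this change:
#
# members = [{'user_id': '2', 'reasons': ['Default reviewer'],
# 'mandatory': False, 'rules': []}]
# validate_default_reviewers(members, reviewer_rules={})
# => [(2, ['Default reviewer'], False, [])]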
@@ -1,2072 +1,2077 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helper functions
23 23
24 24 Consists of functions typically used within templates, but also
25 25 available to Controllers. This module is available to both as 'h'.
26 26 """
27 27
28 28 import random
29 29 import hashlib
30 30 import StringIO
31 31 import urllib
32 32 import math
33 33 import logging
34 34 import re
35 35 import urlparse
36 36 import time
37 37 import string
38 38 import hashlib
39 39 from collections import OrderedDict
40 40
41 41 import pygments
42 42 import itertools
43 43 import fnmatch
44 44
45 45 from datetime import datetime
46 46 from functools import partial
47 47 from pygments.formatters.html import HtmlFormatter
48 48 from pygments import highlight as code_highlight
49 49 from pygments.lexers import (
50 50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
51 51
52 52 from pyramid.threadlocal import get_current_request
53 53
54 54 from webhelpers.html import literal, HTML, escape
55 55 from webhelpers.html.tools import *
56 56 from webhelpers.html.builder import make_tag
57 57 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
58 58 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
59 59 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
60 60 submit, text, password, textarea, title, ul, xml_declaration, radio
61 61 from webhelpers.html.tools import auto_link, button_to, highlight, \
62 62 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
63 63 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
64 64 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
65 65 replace_whitespace, urlify, truncate, wrap_paragraphs
66 66 from webhelpers.date import time_ago_in_words
67 67 from webhelpers.paginate import Page as _Page
68 68 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
69 69 convert_boolean_attrs, NotGiven, _make_safe_id_component
70 70 from webhelpers2.number import format_byte_size
71 71
72 72 from rhodecode.lib.action_parser import action_parser
73 73 from rhodecode.lib.ext_json import json
74 74 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
75 75 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
76 76 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
77 77 AttributeDict, safe_int, md5, md5_safe
78 78 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
79 79 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
80 80 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
81 81 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
82 82 from rhodecode.model.changeset_status import ChangesetStatusModel
83 83 from rhodecode.model.db import Permission, User, Repository
84 84 from rhodecode.model.repo_group import RepoGroupModel
85 85 from rhodecode.model.settings import IssueTrackerSettingsModel
86 86
87 87 log = logging.getLogger(__name__)
88 88
89 89
90 90 DEFAULT_USER = User.DEFAULT_USER
91 91 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
92 92
93 93
94 94 def asset(path, ver=None, **kwargs):
95 95 """
96 96 Helper to generate a static asset file path for rhodecode assets
97 97
98 98 eg. h.asset('images/image.png', ver='3923')
99 99
100 100 :param path: path of asset
101 101 :param ver: optional version query param to append as ?ver=
102 102 """
103 103 request = get_current_request()
104 104 query = {}
105 105 query.update(kwargs)
106 106 if ver:
107 107 query = {'ver': ver}
108 108 return request.static_path(
109 109 'rhodecode:public/{}'.format(path), _query=query)
110 110
111 111
112 112 default_html_escape_table = {
113 113 ord('&'): u'&amp;',
114 114 ord('<'): u'&lt;',
115 115 ord('>'): u'&gt;',
116 116 ord('"'): u'&quot;',
117 117 ord("'"): u'&#39;',
118 118 }
119 119
120 120
121 121 def html_escape(text, html_escape_table=default_html_escape_table):
122 122 """Produce entities within text."""
123 123 return text.translate(html_escape_table)
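# For example, with the default table (which escapes &, <, >, double and
# single quotes):
#
# html_escape('<a href="#">R&D</a>')
# => '&lt;a href=&quot;#&quot;&gt;R&amp;D&lt;/a&gt;'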
124 124
125 125
126 126 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
127 127 """
128 128 Truncate string ``s`` at the first occurrence of ``sub``.
129 129
130 130 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
131 131 """
132 132 suffix_if_chopped = suffix_if_chopped or ''
133 133 pos = s.find(sub)
134 134 if pos == -1:
135 135 return s
136 136
137 137 if inclusive:
138 138 pos += len(sub)
139 139
140 140 chopped = s[:pos]
141 141 left = s[pos:].strip()
142 142
143 143 if left and suffix_if_chopped:
144 144 chopped += suffix_if_chopped
145 145
146 146 return chopped
147 147
148 148
149 149 def shorter(text, size=20):
150 150 postfix = '...'
151 151 if len(text) > size:
152 152 return text[:size - len(postfix)] + postfix
153 153 return text
154 154
155 155
156 156 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
157 157 """
158 158 Reset button
159 159 """
160 160 _set_input_attrs(attrs, type, name, value)
161 161 _set_id_attr(attrs, id, name)
162 162 convert_boolean_attrs(attrs, ["disabled"])
163 163 return HTML.input(**attrs)
164 164
165 165 reset = _reset
166 166 safeid = _make_safe_id_component
167 167
168 168
169 169 def branding(name, length=40):
170 170 return truncate(name, length, indicator="")
171 171
172 172
173 173 def FID(raw_id, path):
174 174 """
175 175 Creates a unique ID for a filenode based on a hash of its path and commit;
176 176 it's safe to use in URLs
177 177
178 178 :param raw_id:
179 179 :param path:
180 180 """
181 181
182 182 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
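# Illustrative only (hash values made up): for a commit raw_id and a file path,
# FID('9fe2abc1d2e3f4a5b6c7', 'docs/index.rst') would return something like
# 'c-9fe2abc1d2e3-1a2b3c4d5e6f', i.e. the short commit id plus the first 12
# characters of the path's md5.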
183 183
184 184
185 185 class _GetError(object):
186 186 """Get error from form_errors, and represent it as span wrapped error
187 187 message
188 188
189 189 :param field_name: field to fetch errors for
190 190 :param form_errors: form errors dict
191 191 """
192 192
193 193 def __call__(self, field_name, form_errors):
194 194 tmpl = """<span class="error_msg">%s</span>"""
195 195 if form_errors and field_name in form_errors:
196 196 return literal(tmpl % form_errors.get(field_name))
197 197
198 198 get_error = _GetError()
199 199
200 200
201 201 class _ToolTip(object):
202 202
203 203 def __call__(self, tooltip_title, trim_at=50):
204 204 """
205 205 Special function just to wrap our text into nicely formatted,
206 206 auto-wrapped text
207 207
208 208 :param tooltip_title:
209 209 """
210 210 tooltip_title = escape(tooltip_title)
211 211 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
212 212 return tooltip_title
213 213 tooltip = _ToolTip()
214 214
215 215
216 216 def files_breadcrumbs(repo_name, commit_id, file_path):
217 217 if isinstance(file_path, str):
218 218 file_path = safe_unicode(file_path)
219 219
220 220 # TODO: johbo: Is this always a url like path, or is this operating
221 221 # system dependent?
222 222 path_segments = file_path.split('/')
223 223
224 224 repo_name_html = escape(repo_name)
225 225 if len(path_segments) == 1 and path_segments[0] == '':
226 226 url_segments = [repo_name_html]
227 227 else:
228 228 url_segments = [
229 229 link_to(
230 230 repo_name_html,
231 231 route_path(
232 232 'repo_files',
233 233 repo_name=repo_name,
234 234 commit_id=commit_id,
235 235 f_path=''),
236 236 class_='pjax-link')]
237 237
238 238 last_cnt = len(path_segments) - 1
239 239 for cnt, segment in enumerate(path_segments):
240 240 if not segment:
241 241 continue
242 242 segment_html = escape(segment)
243 243
244 244 if cnt != last_cnt:
245 245 url_segments.append(
246 246 link_to(
247 247 segment_html,
248 248 route_path(
249 249 'repo_files',
250 250 repo_name=repo_name,
251 251 commit_id=commit_id,
252 252 f_path='/'.join(path_segments[:cnt + 1])),
253 253 class_='pjax-link'))
254 254 else:
255 255 url_segments.append(segment_html)
256 256
257 257 return literal('/'.join(url_segments))
258 258
259 259
260 260 class CodeHtmlFormatter(HtmlFormatter):
261 261 """
262 262 My code Html Formatter for source codes
263 263 """
264 264
265 265 def wrap(self, source, outfile):
266 266 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
267 267
268 268 def _wrap_code(self, source):
269 269 for cnt, it in enumerate(source):
270 270 i, t = it
271 271 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
272 272 yield i, t
273 273
274 274 def _wrap_tablelinenos(self, inner):
275 275 dummyoutfile = StringIO.StringIO()
276 276 lncount = 0
277 277 for t, line in inner:
278 278 if t:
279 279 lncount += 1
280 280 dummyoutfile.write(line)
281 281
282 282 fl = self.linenostart
283 283 mw = len(str(lncount + fl - 1))
284 284 sp = self.linenospecial
285 285 st = self.linenostep
286 286 la = self.lineanchors
287 287 aln = self.anchorlinenos
288 288 nocls = self.noclasses
289 289 if sp:
290 290 lines = []
291 291
292 292 for i in range(fl, fl + lncount):
293 293 if i % st == 0:
294 294 if i % sp == 0:
295 295 if aln:
296 296 lines.append('<a href="#%s%d" class="special">%*d</a>' %
297 297 (la, i, mw, i))
298 298 else:
299 299 lines.append('<span class="special">%*d</span>' % (mw, i))
300 300 else:
301 301 if aln:
302 302 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
303 303 else:
304 304 lines.append('%*d' % (mw, i))
305 305 else:
306 306 lines.append('')
307 307 ls = '\n'.join(lines)
308 308 else:
309 309 lines = []
310 310 for i in range(fl, fl + lncount):
311 311 if i % st == 0:
312 312 if aln:
313 313 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
314 314 else:
315 315 lines.append('%*d' % (mw, i))
316 316 else:
317 317 lines.append('')
318 318 ls = '\n'.join(lines)
319 319
320 320 # in case you wonder about the seemingly redundant <div> here: since the
321 321 # content in the other cell also is wrapped in a div, some browsers in
322 322 # some configurations seem to mess up the formatting...
323 323 if nocls:
324 324 yield 0, ('<table class="%stable">' % self.cssclass +
325 325 '<tr><td><div class="linenodiv" '
326 326 'style="background-color: #f0f0f0; padding-right: 10px">'
327 327 '<pre style="line-height: 125%">' +
328 328 ls + '</pre></div></td><td id="hlcode" class="code">')
329 329 else:
330 330 yield 0, ('<table class="%stable">' % self.cssclass +
331 331 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
332 332 ls + '</pre></div></td><td id="hlcode" class="code">')
333 333 yield 0, dummyoutfile.getvalue()
334 334 yield 0, '</td></tr></table>'
335 335
336 336
337 337 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
338 338 def __init__(self, **kw):
339 339 # only show these line numbers if set
340 340 self.only_lines = kw.pop('only_line_numbers', [])
341 341 self.query_terms = kw.pop('query_terms', [])
342 342 self.max_lines = kw.pop('max_lines', 5)
343 343 self.line_context = kw.pop('line_context', 3)
344 344 self.url = kw.pop('url', None)
345 345
346 346 super(CodeHtmlFormatter, self).__init__(**kw)
347 347
348 348 def _wrap_code(self, source):
349 349 for cnt, it in enumerate(source):
350 350 i, t = it
351 351 t = '<pre>%s</pre>' % t
352 352 yield i, t
353 353
354 354 def _wrap_tablelinenos(self, inner):
355 355 yield 0, '<table class="code-highlight %stable">' % self.cssclass
356 356
357 357 last_shown_line_number = 0
358 358 current_line_number = 1
359 359
360 360 for t, line in inner:
361 361 if not t:
362 362 yield t, line
363 363 continue
364 364
365 365 if current_line_number in self.only_lines:
366 366 if last_shown_line_number + 1 != current_line_number:
367 367 yield 0, '<tr>'
368 368 yield 0, '<td class="line">...</td>'
369 369 yield 0, '<td id="hlcode" class="code"></td>'
370 370 yield 0, '</tr>'
371 371
372 372 yield 0, '<tr>'
373 373 if self.url:
374 374 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
375 375 self.url, current_line_number, current_line_number)
376 376 else:
377 377 yield 0, '<td class="line"><a href="">%i</a></td>' % (
378 378 current_line_number)
379 379 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
380 380 yield 0, '</tr>'
381 381
382 382 last_shown_line_number = current_line_number
383 383
384 384 current_line_number += 1
385 385
386 386
387 387 yield 0, '</table>'
388 388
389 389
390 390 def extract_phrases(text_query):
391 391 """
391 391 Extracts phrases from a search term string, making sure phrases
392 392 contained in double quotes are kept together, and discarding empty values
393 393 or whitespace-only values, eg.
395 395
396 396 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
397 397
398 398 """
399 399
400 400 in_phrase = False
401 401 buf = ''
402 402 phrases = []
403 403 for char in text_query:
404 404 if in_phrase:
405 405 if char == '"': # end phrase
406 406 phrases.append(buf)
407 407 buf = ''
408 408 in_phrase = False
409 409 continue
410 410 else:
411 411 buf += char
412 412 continue
413 413 else:
414 414 if char == '"': # start phrase
415 415 in_phrase = True
416 416 phrases.append(buf)
417 417 buf = ''
418 418 continue
419 419 elif char == ' ':
420 420 phrases.append(buf)
421 421 buf = ''
422 422 continue
423 423 else:
424 424 buf += char
425 425
426 426 phrases.append(buf)
427 427 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
428 428 return phrases
429 429
430 430
431 431 def get_matching_offsets(text, phrases):
432 432 """
433 433 Returns a list of string offsets in `text` at which the given `phrases` match
434 434
435 435 >>> get_matching_offsets('some text here', ['some', 'here'])
436 436 [(0, 4), (10, 14)]
437 437
438 438 """
439 439 offsets = []
440 440 for phrase in phrases:
441 441 for match in re.finditer(phrase, text):
442 442 offsets.append((match.start(), match.end()))
443 443
444 444 return offsets
445 445
446 446
447 447 def normalize_text_for_matching(x):
448 448 """
449 449 Replaces all non-alphanumeric characters with spaces and lower-cases the string;
450 450 useful for comparing two text strings without punctuation
451 451 """
452 452 return re.sub(r'[^\w]', ' ', x.lower())
453 453
454 454
455 455 def get_matching_line_offsets(lines, terms):
456 456 """ Return a set of `lines` indices (starting from 1) matching a
457 457 text search query, along with `context` lines above/below matching lines
458 458
459 459 :param lines: list of strings representing lines
460 460 :param terms: search term string to match in lines eg. 'some text'
461 461 :param context: number of lines above/below a matching line to add to result
462 462 :param max_lines: cut off for lines of interest
463 463 eg.
464 464
465 465 text = '''
466 466 words words words
467 467 words words words
468 468 some text some
469 469 words words words
470 470 words words words
471 471 text here what
472 472 '''
473 473 get_matching_line_offsets(text, 'text', context=1)
474 474 {3: [(5, 9)], 6: [(0, 4)]}
475 475
476 476 """
477 477 matching_lines = {}
478 478 phrases = [normalize_text_for_matching(phrase)
479 479 for phrase in extract_phrases(terms)]
480 480
481 481 for line_index, line in enumerate(lines, start=1):
482 482 match_offsets = get_matching_offsets(
483 483 normalize_text_for_matching(line), phrases)
484 484 if match_offsets:
485 485 matching_lines[line_index] = match_offsets
486 486
487 487 return matching_lines
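# A minimal usage sketch (illustrative values); note the function takes a list
# of lines, not raw text:
#
# lines = ['words words words', 'some text some', 'text here what']
# get_matching_line_offsets(lines, 'text')
# => {2: [(5, 9)], 3: [(0, 4)]}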
488 488
489 489
490 490 def hsv_to_rgb(h, s, v):
491 491 """ Convert hsv color values to rgb """
492 492
493 493 if s == 0.0:
494 494 return v, v, v
495 495 i = int(h * 6.0) # XXX assume int() truncates!
496 496 f = (h * 6.0) - i
497 497 p = v * (1.0 - s)
498 498 q = v * (1.0 - s * f)
499 499 t = v * (1.0 - s * (1.0 - f))
500 500 i = i % 6
501 501 if i == 0:
502 502 return v, t, p
503 503 if i == 1:
504 504 return q, v, p
505 505 if i == 2:
506 506 return p, v, t
507 507 if i == 3:
508 508 return p, q, v
509 509 if i == 4:
510 510 return t, p, v
511 511 if i == 5:
512 512 return v, p, q
513 513
514 514
515 515 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
516 516 """
517 517 Generator for getting n evenly distributed colors using
518 518 HSV color and the golden ratio. It always returns the same order of colors
519 519
520 520 :param n: number of colors to generate
521 521 :param saturation: saturation of returned colors
522 522 :param lightness: lightness of returned colors
523 523 :returns: RGB tuple
524 524 """
525 525
526 526 golden_ratio = 0.618033988749895
527 527 h = 0.22717784590367374
528 528
529 529 for _ in xrange(n):
530 530 h += golden_ratio
531 531 h %= 1
532 532 HSV_tuple = [h, saturation, lightness]
533 533 RGB_tuple = hsv_to_rgb(*HSV_tuple)
534 534 yield map(lambda x: str(int(x * 256)), RGB_tuple)
535 535
536 536
537 537 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
538 538 """
539 539 Returns a function which, when called with an argument, returns a unique
540 540 color for that argument, eg.
541 541
542 542 :param n: number of colors to generate
543 543 :param saturation: saturation of returned colors
544 544 :param lightness: lightness of returned colors
545 545 :returns: css RGB string
546 546
547 547 >>> color_hash = color_hasher()
548 548 >>> color_hash('hello')
549 549 'rgb(34, 12, 59)'
550 550 >>> color_hash('hello')
551 551 'rgb(34, 12, 59)'
552 552 >>> color_hash('other')
553 553 'rgb(90, 224, 159)'
554 554 """
555 555
556 556 color_dict = {}
557 557 cgenerator = unique_color_generator(
558 558 saturation=saturation, lightness=lightness)
559 559
560 560 def get_color_string(thing):
561 561 if thing in color_dict:
562 562 col = color_dict[thing]
563 563 else:
564 564 col = color_dict[thing] = cgenerator.next()
565 565 return "rgb(%s)" % (', '.join(col))
566 566
567 567 return get_color_string
568 568
569 569
570 570 def get_lexer_safe(mimetype=None, filepath=None):
571 571 """
572 572 Tries to return a relevant pygments lexer using mimetype/filepath name,
573 573 defaulting to plain text if none could be found
574 574 """
575 575 lexer = None
576 576 try:
577 577 if mimetype:
578 578 lexer = get_lexer_for_mimetype(mimetype)
579 579 if not lexer:
580 580 lexer = get_lexer_for_filename(filepath)
581 581 except pygments.util.ClassNotFound:
582 582 pass
583 583
584 584 if not lexer:
585 585 lexer = get_lexer_by_name('text')
586 586
587 587 return lexer
588 588
589 589
590 590 def get_lexer_for_filenode(filenode):
591 591 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
592 592 return lexer
593 593
594 594
595 595 def pygmentize(filenode, **kwargs):
596 596 """
597 597 pygmentize function using pygments
598 598
599 599 :param filenode:
600 600 """
601 601 lexer = get_lexer_for_filenode(filenode)
602 602 return literal(code_highlight(filenode.content, lexer,
603 603 CodeHtmlFormatter(**kwargs)))
604 604
605 605
606 606 def is_following_repo(repo_name, user_id):
607 607 from rhodecode.model.scm import ScmModel
608 608 return ScmModel().is_following_repo(repo_name, user_id)
609 609
610 610
611 611 class _Message(object):
612 612 """A message returned by ``Flash.pop_messages()``.
613 613
614 614 Converting the message to a string returns the message text. Instances
615 615 also have the following attributes:
616 616
617 617 * ``message``: the message text.
618 618 * ``category``: the category specified when the message was created.
619 619 """
620 620
621 621 def __init__(self, category, message):
622 622 self.category = category
623 623 self.message = message
624 624
625 625 def __str__(self):
626 626 return self.message
627 627
628 628 __unicode__ = __str__
629 629
630 630 def __html__(self):
631 631 return escape(safe_unicode(self.message))
632 632
633 633
634 634 class Flash(object):
635 635 # List of allowed categories. If None, allow any category.
636 636 categories = ["warning", "notice", "error", "success"]
637 637
638 638 # Default category if none is specified.
639 639 default_category = "notice"
640 640
641 641 def __init__(self, session_key="flash", categories=None,
642 642 default_category=None):
643 643 """
644 644 Instantiate a ``Flash`` object.
645 645
646 646 ``session_key`` is the key to save the messages under in the user's
647 647 session.
648 648
649 649 ``categories`` is an optional list which overrides the default list
650 650 of categories.
651 651
652 652 ``default_category`` overrides the default category used for messages
653 653 when none is specified.
654 654 """
655 655 self.session_key = session_key
656 656 if categories is not None:
657 657 self.categories = categories
658 658 if default_category is not None:
659 659 self.default_category = default_category
660 660 if self.categories and self.default_category not in self.categories:
661 661 raise ValueError(
662 662 "unrecognized default category %r" % (self.default_category,))
663 663
664 664 def pop_messages(self, session=None, request=None):
665 665 """
666 666 Return all accumulated messages and delete them from the session.
667 667
668 668 The return value is a list of ``Message`` objects.
669 669 """
670 670 messages = []
671 671
672 672 if not session:
673 673 if not request:
674 674 request = get_current_request()
675 675 session = request.session
676 676
677 677 # Pop the 'old' pylons flash messages. They are tuples of the form
678 678 # (category, message)
679 679 for cat, msg in session.pop(self.session_key, []):
680 680 messages.append(_Message(cat, msg))
681 681
682 682 # Pop the 'new' pyramid flash messages for each category as list
683 683 # of strings.
684 684 for cat in self.categories:
685 685 for msg in session.pop_flash(queue=cat):
686 686 messages.append(_Message(cat, msg))
687 687 # Map messages from the default queue to the 'notice' category.
688 688 for msg in session.pop_flash():
689 689 messages.append(_Message('notice', msg))
690 690
691 691 session.save()
692 692 return messages
693 693
694 694 def json_alerts(self, session=None, request=None):
695 695 payloads = []
696 696 messages = flash.pop_messages(session=session, request=request)
697 697 if messages:
698 698 for message in messages:
699 699 subdata = {}
700 700 if hasattr(message.message, 'rsplit'):
701 701 flash_data = message.message.rsplit('|DELIM|', 1)
702 702 org_message = flash_data[0]
703 703 if len(flash_data) > 1:
704 704 subdata = json.loads(flash_data[1])
705 705 else:
706 706 org_message = message.message
707 707 payloads.append({
708 708 'message': {
709 709 'message': u'{}'.format(org_message),
710 710 'level': message.category,
711 711 'force': True,
712 712 'subdata': subdata
713 713 }
714 714 })
715 715 return json.dumps(payloads)
716 716
717 717 def __call__(self, message, category=None, ignore_duplicate=False,
718 718 session=None, request=None):
719 719
720 720 if not session:
721 721 if not request:
722 722 request = get_current_request()
723 723 session = request.session
724 724
725 725 session.flash(
726 726 message, queue=category, allow_duplicate=not ignore_duplicate)
727 727
728 728
729 729 flash = Flash()
730 730
731 731 #==============================================================================
732 732 # SCM FILTERS available via h.
733 733 #==============================================================================
734 734 from rhodecode.lib.vcs.utils import author_name, author_email
735 735 from rhodecode.lib.utils2 import credentials_filter, age as _age
736 736 from rhodecode.model.db import User, ChangesetStatus
737 737
738 738 age = _age
739 739 capitalize = lambda x: x.capitalize()
740 740 email = author_email
741 741 short_id = lambda x: x[:12]
742 742 hide_credentials = lambda x: ''.join(credentials_filter(x))
743 743
744 744
745 745 def age_component(datetime_iso, value=None, time_is_local=False):
746 746 title = value or format_date(datetime_iso)
747 747 tzinfo = '+00:00'
748 748
749 749 # detect if we have a timezone info, otherwise, add it
750 750 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
751 751 if time_is_local:
752 752 tzinfo = time.strftime("+%H:%M",
753 753 time.gmtime(
754 754 (datetime.now() - datetime.utcnow()).seconds + 1
755 755 )
756 756 )
757 757
758 758 return literal(
759 759 '<time class="timeago tooltip" '
760 760 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
761 761 datetime_iso, title, tzinfo))
762 762
763 763
764 764 def _shorten_commit_id(commit_id):
765 765 from rhodecode import CONFIG
766 766 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
767 767 return commit_id[:def_len]
768 768
769 769
770 770 def show_id(commit):
771 771 """
772 772 Configurable function that shows the commit ID;
773 773 by default it's r123:fffeeefffeee
774 774
775 775 :param commit: commit instance
776 776 """
777 777 from rhodecode import CONFIG
778 778 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
779 779
780 780 raw_id = _shorten_commit_id(commit.raw_id)
781 781 if show_idx:
782 782 return 'r%s:%s' % (commit.idx, raw_id)
783 783 else:
784 784 return '%s' % (raw_id, )
785 785
786 786
787 787 def format_date(date):
788 788 """
789 789 use a standardized format for dates used in RhodeCode
790 790
791 791 :param date: date/datetime object
792 792 :return: formatted date
793 793 """
794 794
795 795 if date:
796 796 _fmt = "%a, %d %b %Y %H:%M:%S"
797 797 return safe_unicode(date.strftime(_fmt))
798 798
799 799 return u""
800 800
801 801
802 802 class _RepoChecker(object):
803 803
804 804 def __init__(self, backend_alias):
805 805 self._backend_alias = backend_alias
806 806
807 807 def __call__(self, repository):
808 808 if hasattr(repository, 'alias'):
809 809 _type = repository.alias
810 810 elif hasattr(repository, 'repo_type'):
811 811 _type = repository.repo_type
812 812 else:
813 813 _type = repository
814 814 return _type == self._backend_alias
815 815
816 816 is_git = _RepoChecker('git')
817 817 is_hg = _RepoChecker('hg')
818 818 is_svn = _RepoChecker('svn')
819 819
820 820
821 821 def get_repo_type_by_name(repo_name):
822 822 repo = Repository.get_by_repo_name(repo_name)
823 823 return repo.repo_type
824 824
825 825
826 826 def is_svn_without_proxy(repository):
827 827 if is_svn(repository):
828 828 from rhodecode.model.settings import VcsSettingsModel
829 829 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
830 830 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
831 831 return False
832 832
833 833
834 834 def discover_user(author):
835 835 """
836 836 Tries to discover a RhodeCode User based on the author string. The author string
837 837 is typically `FirstName LastName <email@address.com>`
838 838 """
839 839
840 840 # if author is already an instance use it for extraction
841 841 if isinstance(author, User):
842 842 return author
843 843
844 844 # Valid email in the passed attribute; see if the user is in the system
845 845 _email = author_email(author)
846 846 if _email != '':
847 847 user = User.get_by_email(_email, case_insensitive=True, cache=True)
848 848 if user is not None:
849 849 return user
850 850
851 851 # Maybe it's a username; try to extract it and fetch the user by username
852 852 _author = author_name(author)
853 853 user = User.get_by_username(_author, case_insensitive=True, cache=True)
854 854 if user is not None:
855 855 return user
856 856
857 857 return None
858 858
859 859
860 860 def email_or_none(author):
861 861 # extract email from the commit string
862 862 _email = author_email(author)
863 863
864 864 # If we have an email, use it, otherwise
865 865 # see if it contains a username we can get an email from
866 866 if _email != '':
867 867 return _email
868 868 else:
869 869 user = User.get_by_username(
870 870 author_name(author), case_insensitive=True, cache=True)
871 871
872 872 if user is not None:
873 873 return user.email
874 874
875 875 # No valid email, not a valid user in the system, none!
876 876 return None
877 877
878 878
879 879 def link_to_user(author, length=0, **kwargs):
880 880 user = discover_user(author)
881 881 # user can be None, but if we have it already it means we can re-use it
882 882 # in the person() function, so we save 1 intensive-query
883 883 if user:
884 884 author = user
885 885
886 886 display_person = person(author, 'username_or_name_or_email')
887 887 if length:
888 888 display_person = shorter(display_person, length)
889 889
890 890 if user:
891 891 return link_to(
892 892 escape(display_person),
893 893 route_path('user_profile', username=user.username),
894 894 **kwargs)
895 895 else:
896 896 return escape(display_person)
897 897
898 898
899 899 def person(author, show_attr="username_and_name"):
900 900 user = discover_user(author)
901 901 if user:
902 902 return getattr(user, show_attr)
903 903 else:
904 904 _author = author_name(author)
905 905 _email = email(author)
906 906 return _author or _email
907 907
908 908
909 909 def author_string(email):
910 910 if email:
911 911 user = User.get_by_email(email, case_insensitive=True, cache=True)
912 912 if user:
913 913 if user.first_name or user.last_name:
914 914 return '%s %s &lt;%s&gt;' % (
915 915 user.first_name, user.last_name, email)
916 916 else:
917 917 return email
918 918 else:
919 919 return email
920 920 else:
921 921 return None
922 922
923 923
924 924 def person_by_id(id_, show_attr="username_and_name"):
925 925 # attr to return from fetched user
926 926 person_getter = lambda usr: getattr(usr, show_attr)
927 927
928 928     # maybe it's an ID?
929 929 if str(id_).isdigit() or isinstance(id_, int):
930 930 id_ = int(id_)
931 931 user = User.get(id_)
932 932 if user is not None:
933 933 return person_getter(user)
934 934 return id_
935 935
936 936
937 937 def gravatar_with_user(request, author, show_disabled=False):
938 938 _render = request.get_partial_renderer(
939 939 'rhodecode:templates/base/base.mako')
940 940 return _render('gravatar_with_user', author, show_disabled=show_disabled)
941 941
942 942
943 943 tags_paterns = OrderedDict((
944 944 ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
945 945 '<div class="metatag" tag="lang">\\2</div>')),
946 946
947 947 ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
948 948 '<div class="metatag" tag="see">see: \\1 </div>')),
949 949
950 950 ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
951 951 '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),
952 952
953 953 ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
954 954 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),
955 955
956 956 ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
957 957 '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),
958 958
959 959 ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
960 960 '<div class="metatag" tag="state \\1">\\1</div>')),
961 961
962 962 # label in grey
963 963 ('label', (re.compile(r'\[([a-z]+)\]'),
964 964 '<div class="metatag" tag="label">\\1</div>')),
965 965
966 966 # generic catch all in grey
967 967 ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
968 968 '<div class="metatag" tag="generic">\\1</div>')),
969 969 ))
970 970
971 971
972 972 def extract_metatags(value):
973 973 """
974 974 Extract supported meta-tags from given text value
975 975 """
976 976 tags = []
977 977 if not value:
978 978 return tags, ''
979 979
980 980 for key, val in tags_paterns.items():
981 981 pat, replace_html = val
982 982 tags.extend([(key, x.group()) for x in pat.finditer(value)])
983 983 value = pat.sub('', value)
984 984
985 985 return tags, value
986 986
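# A rough, illustrative example of the extraction above. The input is assumed
# to be HTML-escaped already, which is why the patterns match '=&gt;':
#
#   tags, rest = extract_metatags(u'[stable] [lang =&gt; python] docs and examples')
#   # tags -> [('lang', u'[lang =&gt; python]'), ('state', u'[stable]')]
#   # rest -> u'  docs and examples'  (the matched tags are stripped from the text)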
987 987
988 988 def style_metatag(tag_type, value):
989 989 """
990 990 converts tags from value into html equivalent
991 991 """
992 992 if not value:
993 993 return ''
994 994
995 995 html_value = value
996 996 tag_data = tags_paterns.get(tag_type)
997 997 if tag_data:
998 998 pat, replace_html = tag_data
999 999 # convert to plain `unicode` instead of a markup tag to be used in
1000 1000 # regex expressions. safe_unicode doesn't work here
1001 1001 html_value = pat.sub(replace_html, unicode(value))
1002 1002
1003 1003 return html_value
1004 1004
1005 1005
1006 1006 def bool2icon(value):
1007 1007 """
1008 1008     Returns the boolean value of a given value, represented as an html element
1009 1009     with classes that render icons
1010 1010
1011 1011 :param value: given value to convert to html node
1012 1012 """
1013 1013
1014 1014 if value: # does bool conversion
1015 1015 return HTML.tag('i', class_="icon-true")
1016 1016 else: # not true as bool
1017 1017 return HTML.tag('i', class_="icon-false")
1018 1018
1019 1019
1020 1020 #==============================================================================
1021 1021 # PERMS
1022 1022 #==============================================================================
1023 1023 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
1024 1024 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
1025 1025 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
1026 1026 csrf_token_key
1027 1027
1028 1028
1029 1029 #==============================================================================
1030 1030 # GRAVATAR URL
1031 1031 #==============================================================================
1032 1032 class InitialsGravatar(object):
1033 1033 def __init__(self, email_address, first_name, last_name, size=30,
1034 1034 background=None, text_color='#fff'):
1035 1035 self.size = size
1036 1036 self.first_name = first_name
1037 1037 self.last_name = last_name
1038 1038 self.email_address = email_address
1039 1039 self.background = background or self.str2color(email_address)
1040 1040 self.text_color = text_color
1041 1041
1042 1042 def get_color_bank(self):
1043 1043 """
1044 1044 returns a predefined list of colors that gravatars can use.
1045 1045 Those are randomized distinct colors that guarantee readability and
1046 1046 uniqueness.
1047 1047
1048 1048 generated with: http://phrogz.net/css/distinct-colors.html
1049 1049 """
1050 1050 return [
1051 1051 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1052 1052 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1053 1053 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1054 1054 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1055 1055 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1056 1056 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1057 1057 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1058 1058 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1059 1059 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1060 1060 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1061 1061 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1062 1062 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1063 1063 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1064 1064 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1065 1065 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1066 1066 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1067 1067 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1068 1068 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1069 1069 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1070 1070 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1071 1071 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1072 1072 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1073 1073 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1074 1074 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1075 1075 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1076 1076 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1077 1077 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1078 1078 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1079 1079 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1080 1080 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1081 1081 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1082 1082 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1083 1083 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1084 1084 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1085 1085 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1086 1086 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1087 1087 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1088 1088 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1089 1089 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1090 1090 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1091 1091 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1092 1092 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1093 1093 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1094 1094 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1095 1095 '#4f8c46', '#368dd9', '#5c0073'
1096 1096 ]
1097 1097
1098 1098 def rgb_to_hex_color(self, rgb_tuple):
1099 1099 """
1100 1100         Converts a passed rgb_tuple to a hex color.
1101 1101
1102 1102 :param rgb_tuple: tuple with 3 ints represents rgb color space
1103 1103 """
1104 1104 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1105 1105
1106 1106 def email_to_int_list(self, email_str):
1107 1107 """
1108 1108         Get every byte of the hex digest value of the email and turn it into an integer.
1109 1109         It's always going to be between 0-255
1110 1110 """
1111 1111 digest = md5_safe(email_str.lower())
1112 1112 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1113 1113
1114 1114 def pick_color_bank_index(self, email_str, color_bank):
1115 1115 return self.email_to_int_list(email_str)[0] % len(color_bank)
1116 1116
1117 1117 def str2color(self, email_str):
1118 1118 """
1119 1119 Tries to map in a stable algorithm an email to color
1120 1120
1121 1121 :param email_str:
1122 1122 """
1123 1123 color_bank = self.get_color_bank()
1124 1124         # pick position (modulo its length so we always find it in the
1125 1125         # bank even if it's smaller than 256 values)
1126 1126 pos = self.pick_color_bank_index(email_str, color_bank)
1127 1127 return color_bank[pos]
1128 1128
1129 1129 def normalize_email(self, email_address):
1130 1130 import unicodedata
1131 1131 # default host used to fill in the fake/missing email
1132 1132 default_host = u'localhost'
1133 1133
1134 1134 if not email_address:
1135 1135 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1136 1136
1137 1137 email_address = safe_unicode(email_address)
1138 1138
1139 1139 if u'@' not in email_address:
1140 1140 email_address = u'%s@%s' % (email_address, default_host)
1141 1141
1142 1142 if email_address.endswith(u'@'):
1143 1143 email_address = u'%s%s' % (email_address, default_host)
1144 1144
1145 1145 email_address = unicodedata.normalize('NFKD', email_address)\
1146 1146 .encode('ascii', 'ignore')
1147 1147 return email_address
1148 1148
1149 1149 def get_initials(self):
1150 1150 """
1151 1151 Returns 2 letter initials calculated based on the input.
1152 1152         The algorithm picks the given email address and takes the first letter
1153 1153         of the part before @, and then the first letter of the server name. In case
1154 1154         the part before @ is in the format `somestring.somestring2`, it replaces
1155 1155         the server letter with the first letter of somestring2.
1156 1156
1157 1157         In case the function was initialized with both first and last name, this
1158 1158         overrides the extraction from email with the first letters of the first and
1159 1159         last name. We add special logic to that functionality: in case the full name
1160 1160         is compound, like Guido Von Rossum, we use the last part of the last name
1161 1161 (Von Rossum) picking `R`.
1162 1162
1163 1163         The function also normalizes non-ascii characters to their ascii
1164 1164 representation, eg Ą => A
1165 1165 """
1166 1166 import unicodedata
1167 1167 # replace non-ascii to ascii
1168 1168 first_name = unicodedata.normalize(
1169 1169 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1170 1170 last_name = unicodedata.normalize(
1171 1171 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1172 1172
1173 1173 # do NFKD encoding, and also make sure email has proper format
1174 1174 email_address = self.normalize_email(self.email_address)
1175 1175
1176 1176 # first push the email initials
1177 1177 prefix, server = email_address.split('@', 1)
1178 1178
1179 1179 # check if prefix is maybe a 'first_name.last_name' syntax
1180 1180 _dot_split = prefix.rsplit('.', 1)
1181 1181 if len(_dot_split) == 2 and _dot_split[1]:
1182 1182 initials = [_dot_split[0][0], _dot_split[1][0]]
1183 1183 else:
1184 1184 initials = [prefix[0], server[0]]
1185 1185
1186 1186 # then try to replace either first_name or last_name
1187 1187 fn_letter = (first_name or " ")[0].strip()
1188 1188 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1189 1189
1190 1190 if fn_letter:
1191 1191 initials[0] = fn_letter
1192 1192
1193 1193 if ln_letter:
1194 1194 initials[1] = ln_letter
1195 1195
1196 1196 return ''.join(initials).upper()
1197 1197
1198 1198 def get_img_data_by_type(self, font_family, img_type):
1199 1199 default_user = """
1200 1200 <svg xmlns="http://www.w3.org/2000/svg"
1201 1201 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1202 1202 viewBox="-15 -10 439.165 429.164"
1203 1203
1204 1204 xml:space="preserve"
1205 1205 style="background:{background};" >
1206 1206
1207 1207 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1208 1208 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1209 1209 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1210 1210 168.596,153.916,216.671,
1211 1211 204.583,216.671z" fill="{text_color}"/>
1212 1212 <path d="M407.164,374.717L360.88,
1213 1213 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1214 1214 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1215 1215 15.366-44.203,23.488-69.076,23.488c-24.877,
1216 1216 0-48.762-8.122-69.078-23.488
1217 1217 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1218 1218 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1219 1219 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1220 1220 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1221 1221 19.402-10.527 C409.699,390.129,
1222 1222 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1223 1223 </svg>""".format(
1224 1224 size=self.size,
1225 1225 background='#979797', # @grey4
1226 1226 text_color=self.text_color,
1227 1227 font_family=font_family)
1228 1228
1229 1229 return {
1230 1230 "default_user": default_user
1231 1231 }[img_type]
1232 1232
1233 1233 def get_img_data(self, svg_type=None):
1234 1234 """
1235 1235 generates the svg metadata for image
1236 1236 """
1237 1237
1238 1238 font_family = ','.join([
1239 1239 'proximanovaregular',
1240 1240 'Proxima Nova Regular',
1241 1241 'Proxima Nova',
1242 1242 'Arial',
1243 1243 'Lucida Grande',
1244 1244 'sans-serif'
1245 1245 ])
1246 1246 if svg_type:
1247 1247 return self.get_img_data_by_type(font_family, svg_type)
1248 1248
1249 1249 initials = self.get_initials()
1250 1250 img_data = """
1251 1251 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1252 1252 width="{size}" height="{size}"
1253 1253 style="width: 100%; height: 100%; background-color: {background}"
1254 1254 viewBox="0 0 {size} {size}">
1255 1255 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1256 1256 pointer-events="auto" fill="{text_color}"
1257 1257 font-family="{font_family}"
1258 1258 style="font-weight: 400; font-size: {f_size}px;">{text}
1259 1259 </text>
1260 1260 </svg>""".format(
1261 1261 size=self.size,
1262 1262 f_size=self.size/1.85, # scale the text inside the box nicely
1263 1263 background=self.background,
1264 1264 text_color=self.text_color,
1265 1265 text=initials.upper(),
1266 1266 font_family=font_family)
1267 1267
1268 1268 return img_data
1269 1269
1270 1270 def generate_svg(self, svg_type=None):
1271 1271 img_data = self.get_img_data(svg_type)
1272 1272 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1273 1273
1274 1274
1275 1275 def initials_gravatar(email_address, first_name, last_name, size=30):
1276 1276 svg_type = None
1277 1277 if email_address == User.DEFAULT_USER_EMAIL:
1278 1278 svg_type = 'default_user'
1279 1279 klass = InitialsGravatar(email_address, first_name, last_name, size)
1280 1280 return klass.generate_svg(svg_type=svg_type)
1281 1281
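# A rough sketch of what the initials logic above produces (values derived by
# hand from the rules, not from a live instance):
#
#   InitialsGravatar('marcin@rhodecode.com', '', '').get_initials()            # -> 'MR'
#   InitialsGravatar('john.doe@example.com', '', '').get_initials()            # -> 'JD'
#   InitialsGravatar('guido@python.org', 'Guido', 'Von Rossum').get_initials() # -> 'GR'
#
# 'MR' comes from 'marcin' + 'rhodecode.com', 'JD' from the 'first.last' style
# prefix, and 'GR' from the first/last name override (compound last names pick
# the last part, here 'Rossum'). The background color is stable per address:
# the first byte of md5(email.lower()) modulo the color bank length selects it.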
1282 1282
1283 1283 def gravatar_url(email_address, size=30, request=None):
1284 1284 request = get_current_request()
1285 1285 _use_gravatar = request.call_context.visual.use_gravatar
1286 1286 _gravatar_url = request.call_context.visual.gravatar_url
1287 1287
1288 1288 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1289 1289
1290 1290 email_address = email_address or User.DEFAULT_USER_EMAIL
1291 1291 if isinstance(email_address, unicode):
1292 1292 # hashlib crashes on unicode items
1293 1293 email_address = safe_str(email_address)
1294 1294
1295 1295 # empty email or default user
1296 1296 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1297 1297 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1298 1298
1299 1299 if _use_gravatar:
1300 1300 # TODO: Disuse pyramid thread locals. Think about another solution to
1301 1301 # get the host and schema here.
1302 1302 request = get_current_request()
1303 1303 tmpl = safe_str(_gravatar_url)
1304 1304 tmpl = tmpl.replace('{email}', email_address)\
1305 1305 .replace('{md5email}', md5_safe(email_address.lower())) \
1306 1306 .replace('{netloc}', request.host)\
1307 1307 .replace('{scheme}', request.scheme)\
1308 1308 .replace('{size}', safe_str(size))
1309 1309 return tmpl
1310 1310 else:
1311 1311 return initials_gravatar(email_address, '', '', size=size)
1312 1312
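# Illustrative only, since the exact template is configuration dependent. With
# a gravatar template such as
#   'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
# gravatar_url('marcin@rhodecode.com', size=30) replaces {md5email} with the
# md5 hex digest of the lower-cased address and {size} with 30. When gravatars
# are disabled, or the email is empty/the default user, an inline SVG with the
# user's initials is returned via initials_gravatar() instead.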
1313 1313
1314 1314 class Page(_Page):
1315 1315 """
1316 1316 Custom pager to match rendering style with paginator
1317 1317 """
1318 1318
1319 1319 def _get_pos(self, cur_page, max_page, items):
1320 1320 edge = (items / 2) + 1
1321 1321 if (cur_page <= edge):
1322 1322 radius = max(items / 2, items - cur_page)
1323 1323 elif (max_page - cur_page) < edge:
1324 1324 radius = (items - 1) - (max_page - cur_page)
1325 1325 else:
1326 1326 radius = items / 2
1327 1327
1328 1328 left = max(1, (cur_page - (radius)))
1329 1329 right = min(max_page, cur_page + (radius))
1330 1330 return left, cur_page, right
1331 1331
1332 1332 def _range(self, regexp_match):
1333 1333 """
1334 1334 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1335 1335
1336 1336 Arguments:
1337 1337
1338 1338 regexp_match
1339 1339 A "re" (regular expressions) match object containing the
1340 1340 radius of linked pages around the current page in
1341 1341 regexp_match.group(1) as a string
1342 1342
1343 1343 This function is supposed to be called as a callable in
1344 1344 re.sub.
1345 1345
1346 1346 """
1347 1347 radius = int(regexp_match.group(1))
1348 1348
1349 1349 # Compute the first and last page number within the radius
1350 1350 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1351 1351 # -> leftmost_page = 5
1352 1352 # -> rightmost_page = 9
1353 1353 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1354 1354 self.last_page,
1355 1355 (radius * 2) + 1)
1356 1356 nav_items = []
1357 1357
1358 1358 # Create a link to the first page (unless we are on the first page
1359 1359 # or there would be no need to insert '..' spacers)
1360 1360 if self.page != self.first_page and self.first_page < leftmost_page:
1361 1361 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1362 1362
1363 1363 # Insert dots if there are pages between the first page
1364 1364 # and the currently displayed page range
1365 1365 if leftmost_page - self.first_page > 1:
1366 1366             # Wrap in a SPAN tag if dotdot_attr is set
1367 1367 text = '..'
1368 1368 if self.dotdot_attr:
1369 1369 text = HTML.span(c=text, **self.dotdot_attr)
1370 1370 nav_items.append(text)
1371 1371
1372 1372 for thispage in xrange(leftmost_page, rightmost_page + 1):
1373 1373             # Highlight the current page number and do not use a link
1374 1374 if thispage == self.page:
1375 1375 text = '%s' % (thispage,)
1376 1376                 # Wrap in a SPAN tag if curpage_attr is set
1377 1377 if self.curpage_attr:
1378 1378 text = HTML.span(c=text, **self.curpage_attr)
1379 1379 nav_items.append(text)
1380 1380 # Otherwise create just a link to that page
1381 1381 else:
1382 1382 text = '%s' % (thispage,)
1383 1383 nav_items.append(self._pagerlink(thispage, text))
1384 1384
1385 1385 # Insert dots if there are pages between the displayed
1386 1386 # page numbers and the end of the page range
1387 1387 if self.last_page - rightmost_page > 1:
1388 1388 text = '..'
1389 1389             # Wrap in a SPAN tag if dotdot_attr is set
1390 1390 if self.dotdot_attr:
1391 1391 text = HTML.span(c=text, **self.dotdot_attr)
1392 1392 nav_items.append(text)
1393 1393
1394 1394 # Create a link to the very last page (unless we are on the last
1395 1395 # page or there would be no need to insert '..' spacers)
1396 1396 if self.page != self.last_page and rightmost_page < self.last_page:
1397 1397 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1398 1398
1399 1399 ## prerender links
1400 1400 #_page_link = url.current()
1401 1401 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1402 1402 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1403 1403 return self.separator.join(nav_items)
1404 1404
1405 1405 def pager(self, format='~2~', page_param='page', partial_param='partial',
1406 1406 show_if_single_page=False, separator=' ', onclick=None,
1407 1407 symbol_first='<<', symbol_last='>>',
1408 1408 symbol_previous='<', symbol_next='>',
1409 1409 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1410 1410 curpage_attr={'class': 'pager_curpage'},
1411 1411 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1412 1412
1413 1413 self.curpage_attr = curpage_attr
1414 1414 self.separator = separator
1415 1415 self.pager_kwargs = kwargs
1416 1416 self.page_param = page_param
1417 1417 self.partial_param = partial_param
1418 1418 self.onclick = onclick
1419 1419 self.link_attr = link_attr
1420 1420 self.dotdot_attr = dotdot_attr
1421 1421
1422 1422 # Don't show navigator if there is no more than one page
1423 1423 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1424 1424 return ''
1425 1425
1426 1426 from string import Template
1427 1427 # Replace ~...~ in token format by range of pages
1428 1428 result = re.sub(r'~(\d+)~', self._range, format)
1429 1429
1430 1430 # Interpolate '%' variables
1431 1431 result = Template(result).safe_substitute({
1432 1432 'first_page': self.first_page,
1433 1433 'last_page': self.last_page,
1434 1434 'page': self.page,
1435 1435 'page_count': self.page_count,
1436 1436 'items_per_page': self.items_per_page,
1437 1437 'first_item': self.first_item,
1438 1438 'last_item': self.last_item,
1439 1439 'item_count': self.item_count,
1440 1440 'link_first': self.page > self.first_page and \
1441 1441 self._pagerlink(self.first_page, symbol_first) or '',
1442 1442 'link_last': self.page < self.last_page and \
1443 1443 self._pagerlink(self.last_page, symbol_last) or '',
1444 1444 'link_previous': self.previous_page and \
1445 1445 self._pagerlink(self.previous_page, symbol_previous) \
1446 1446 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1447 1447 'link_next': self.next_page and \
1448 1448 self._pagerlink(self.next_page, symbol_next) \
1449 1449 or HTML.span(symbol_next, class_="pg-next disabled")
1450 1450 })
1451 1451
1452 1452 return literal(result)
1453 1453
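# Worked example of the sliding window in _get_pos()/_range() above, for the
# default format '~2~' (radius=2, so items = 2*2 + 1 = 5), using the same
# integer division as the code:
#
#   _get_pos(cur_page=7, max_page=12, items=5)
#   # edge = 5/2 + 1 = 3; 7 > edge and (12 - 7) >= edge, so radius = 5/2 = 2
#   # left = max(1, 7 - 2) = 5, right = min(12, 7 + 2) = 9
#   # rendered pager: '1 .. 5 6 [7] 8 9 .. 12'
#
#   _get_pos(cur_page=2, max_page=12, items=5)
#   # 2 <= edge, so radius = max(5/2, 5 - 2) = 3; left = 1, right = 5
#   # rendered pager: '1 [2] 3 4 5 .. 12'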
1454 1454
1455 1455 #==============================================================================
1456 1456 # REPO PAGER, PAGER FOR REPOSITORY
1457 1457 #==============================================================================
1458 1458 class RepoPage(Page):
1459 1459
1460 1460 def __init__(self, collection, page=1, items_per_page=20,
1461 1461 item_count=None, url=None, **kwargs):
1462 1462
1463 1463         """Create a "RepoPage" instance, a special pager for paging
1464 1464         repository items
1465 1465 """
1466 1466 self._url_generator = url
1467 1467
1468 1468         # Save the kwargs class-wide so they can be used in the pager() method
1469 1469 self.kwargs = kwargs
1470 1470
1471 1471 # Save a reference to the collection
1472 1472 self.original_collection = collection
1473 1473
1474 1474 self.collection = collection
1475 1475
1476 1476 # The self.page is the number of the current page.
1477 1477 # The first page has the number 1!
1478 1478 try:
1479 1479 self.page = int(page) # make it int() if we get it as a string
1480 1480 except (ValueError, TypeError):
1481 1481 self.page = 1
1482 1482
1483 1483 self.items_per_page = items_per_page
1484 1484
1485 1485 # Unless the user tells us how many items the collections has
1486 1486 # we calculate that ourselves.
1487 1487 if item_count is not None:
1488 1488 self.item_count = item_count
1489 1489 else:
1490 1490 self.item_count = len(self.collection)
1491 1491
1492 1492 # Compute the number of the first and last available page
1493 1493 if self.item_count > 0:
1494 1494 self.first_page = 1
1495 1495 self.page_count = int(math.ceil(float(self.item_count) /
1496 1496 self.items_per_page))
1497 1497 self.last_page = self.first_page + self.page_count - 1
1498 1498
1499 1499             # Make sure that the requested page number is in the range of
1500 1500 # valid pages
1501 1501 if self.page > self.last_page:
1502 1502 self.page = self.last_page
1503 1503 elif self.page < self.first_page:
1504 1504 self.page = self.first_page
1505 1505
1506 1506 # Note: the number of items on this page can be less than
1507 1507 # items_per_page if the last page is not full
1508 1508 self.first_item = max(0, (self.item_count) - (self.page *
1509 1509 items_per_page))
1510 1510 self.last_item = ((self.item_count - 1) - items_per_page *
1511 1511 (self.page - 1))
1512 1512
1513 1513 self.items = list(self.collection[self.first_item:self.last_item + 1])
1514 1514
1515 1515 # Links to previous and next page
1516 1516 if self.page > self.first_page:
1517 1517 self.previous_page = self.page - 1
1518 1518 else:
1519 1519 self.previous_page = None
1520 1520
1521 1521 if self.page < self.last_page:
1522 1522 self.next_page = self.page + 1
1523 1523 else:
1524 1524 self.next_page = None
1525 1525
1526 1526 # No items available
1527 1527 else:
1528 1528 self.first_page = None
1529 1529 self.page_count = 0
1530 1530 self.last_page = None
1531 1531 self.first_item = None
1532 1532 self.last_item = None
1533 1533 self.previous_page = None
1534 1534 self.next_page = None
1535 1535 self.items = []
1536 1536
1537 1537 # This is a subclass of the 'list' type. Initialise the list now.
1538 1538 list.__init__(self, reversed(self.items))
1539 1539
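# Worked example of the slicing math above, assuming a collection of 45 items
# with items_per_page=20 (page_count = ceil(45/20.0) = 3):
#
#   page=2: first_item = max(0, 45 - 2*20) = 5
#           last_item  = (45 - 1) - 20*(2 - 1) = 24
#           items = collection[5:25]  -> 20 items, then reversed()
#   page=3: first_item = max(0, 45 - 3*20) = 0
#           last_item  = (45 - 1) - 20*(3 - 1) = 4
#           items = collection[0:5]   -> the 5 remaining items (partial last page)
#
# The reversed() call keeps newest-first ordering when the underlying
# collection is stored oldest-first, as repository commit lists typically are.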
1540 1540
1541 1541 def breadcrumb_repo_link(repo):
1542 1542 """
1543 1543     Makes a breadcrumb path link to a repo
1544 1544
1545 1545 ex::
1546 1546 group >> subgroup >> repo
1547 1547
1548 1548 :param repo: a Repository instance
1549 1549 """
1550 1550
1551 1551 path = [
1552 1552 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1553 1553 for group in repo.groups_with_parents
1554 1554 ] + [
1555 1555 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1556 1556 ]
1557 1557
1558 1558 return literal(' &raquo; '.join(path))
1559 1559
1560 1560
1561 1561 def format_byte_size_binary(file_size):
1562 1562 """
1563 1563 Formats file/folder sizes to standard.
1564 1564 """
1565 1565 if file_size is None:
1566 1566 file_size = 0
1567 1567
1568 1568 formatted_size = format_byte_size(file_size, binary=True)
1569 1569 return formatted_size
1570 1570
1571 1571
1572 1572 def urlify_text(text_, safe=True):
1573 1573 """
1574 1574     Extract urls from text and make html links out of them
1575 1575
1576 1576 :param text_:
1577 1577 """
1578 1578
1579 1579 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1580 1580 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1581 1581
1582 1582 def url_func(match_obj):
1583 1583 url_full = match_obj.groups()[0]
1584 1584 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1585 1585 _newtext = url_pat.sub(url_func, text_)
1586 1586 if safe:
1587 1587 return literal(_newtext)
1588 1588 return _newtext
1589 1589
1590 1590
1591 1591 def urlify_commits(text_, repository):
1592 1592 """
1593 1593 Extract commit ids from text and make link from them
1594 1594
1595 1595 :param text_:
1596 1596 :param repository: repo name to build the URL with
1597 1597 """
1598 1598
1599 1599 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1600 1600
1601 1601 def url_func(match_obj):
1602 1602 commit_id = match_obj.groups()[1]
1603 1603 pref = match_obj.groups()[0]
1604 1604 suf = match_obj.groups()[2]
1605 1605
1606 1606 tmpl = (
1607 1607 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1608 1608 '%(commit_id)s</a>%(suf)s'
1609 1609 )
1610 1610 return tmpl % {
1611 1611 'pref': pref,
1612 1612 'cls': 'revision-link',
1613 1613 'url': route_url('repo_commit', repo_name=repository,
1614 1614 commit_id=commit_id),
1615 1615 'commit_id': commit_id,
1616 1616 'suf': suf
1617 1617 }
1618 1618
1619 1619 newtext = URL_PAT.sub(url_func, text_)
1620 1620
1621 1621 return newtext
1622 1622
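# Rough example of the substitution above: any 12 to 40 character hex string
# bounded by whitespace or the start/end of the text is treated as a commit id.
#
#   urlify_commits('merged fffeeefffeee into stable', 'my-repo')
#   # -> 'merged <a class="revision-link" href="...">fffeeefffeee</a> into stable'
#
# The href is produced by route_url('repo_commit', ...), so the actual URL
# depends on the routing configuration; 'my-repo' is just a placeholder name.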
1623 1623
1624 1624 def _process_url_func(match_obj, repo_name, uid, entry,
1625 1625 return_raw_data=False, link_format='html'):
1626 1626 pref = ''
1627 1627 if match_obj.group().startswith(' '):
1628 1628 pref = ' '
1629 1629
1630 1630 issue_id = ''.join(match_obj.groups())
1631 1631
1632 1632 if link_format == 'html':
1633 1633 tmpl = (
1634 1634 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1635 1635 '%(issue-prefix)s%(id-repr)s'
1636 1636 '</a>')
1637 1637 elif link_format == 'rst':
1638 1638 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1639 1639 elif link_format == 'markdown':
1640 1640 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1641 1641 else:
1642 1642 raise ValueError('Bad link_format:{}'.format(link_format))
1643 1643
1644 1644 (repo_name_cleaned,
1645 1645 parent_group_name) = RepoGroupModel().\
1646 1646 _get_group_name_and_parent(repo_name)
1647 1647
1648 1648 # variables replacement
1649 1649 named_vars = {
1650 1650 'id': issue_id,
1651 1651 'repo': repo_name,
1652 1652 'repo_name': repo_name_cleaned,
1653 1653 'group_name': parent_group_name
1654 1654 }
1655 1655 # named regex variables
1656 1656 named_vars.update(match_obj.groupdict())
1657 1657 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1658 1658
1659 1659 data = {
1660 1660 'pref': pref,
1661 1661 'cls': 'issue-tracker-link',
1662 1662 'url': _url,
1663 1663 'id-repr': issue_id,
1664 1664 'issue-prefix': entry['pref'],
1665 1665 'serv': entry['url'],
1666 1666 }
1667 1667 if return_raw_data:
1668 1668 return {
1669 1669 'id': issue_id,
1670 1670 'url': _url
1671 1671 }
1672 1672 return tmpl % data
1673 1673
1674 1674
1675 1675 def get_active_pattern_entries(repo_name):
1676 1676 repo = None
1677 1677 if repo_name:
1678 1678         # Retrieve the repo to avoid an invalid repo_name exploding in
1679 1679         # IssueTrackerSettingsModel, while still passing the invalid name further down
1680 1680 repo = Repository.get_by_repo_name(repo_name, cache=True)
1681 1681
1682 1682 settings_model = IssueTrackerSettingsModel(repo=repo)
1683 1683 active_entries = settings_model.get_settings(cache=True)
1684 1684 return active_entries
1685 1685
1686 1686
1687 1687 def process_patterns(text_string, repo_name, link_format='html',
1688 1688 active_entries=None):
1689 1689
1690 1690 allowed_formats = ['html', 'rst', 'markdown']
1691 1691 if link_format not in allowed_formats:
1692 1692 raise ValueError('Link format can be only one of:{} got {}'.format(
1693 1693 allowed_formats, link_format))
1694 1694
1695 1695 active_entries = active_entries or get_active_pattern_entries(repo_name)
1696 1696 issues_data = []
1697 1697 newtext = text_string
1698 1698
1699 1699 for uid, entry in active_entries.items():
1700 1700 log.debug('found issue tracker entry with uid %s' % (uid,))
1701 1701
1702 1702 if not (entry['pat'] and entry['url']):
1703 1703 log.debug('skipping due to missing data')
1704 1704 continue
1705 1705
1706 1706 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1707 1707 % (uid, entry['pat'], entry['url'], entry['pref']))
1708 1708
1709 1709 try:
1710 1710 pattern = re.compile(r'%s' % entry['pat'])
1711 1711 except re.error:
1712 1712 log.exception(
1713 1713 'issue tracker pattern: `%s` failed to compile',
1714 1714 entry['pat'])
1715 1715 continue
1716 1716
1717 1717 data_func = partial(
1718 1718 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1719 1719 return_raw_data=True)
1720 1720
1721 1721 for match_obj in pattern.finditer(text_string):
1722 1722 issues_data.append(data_func(match_obj))
1723 1723
1724 1724 url_func = partial(
1725 1725 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1726 1726 link_format=link_format)
1727 1727
1728 1728 newtext = pattern.sub(url_func, newtext)
1729 1729 log.debug('processed prefix:uid `%s`' % (uid,))
1730 1730
1731 1731 return newtext, issues_data
1732 1732
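# Rough sketch of how an issue tracker entry is applied. The entry below is an
# assumption for illustration, not a shipped default:
#
#   entry = {'pat': r'#(?P<issue_id>\d+)',
#            'url': 'https://issues.example.com/browse/${id}',
#            'pref': '#'}
#   process_patterns('fixes #42', 'my-repo', active_entries={'uid-1': entry})
#   # -> the '#42' token becomes
#   #    <a class="issue-tracker-link" href="https://issues.example.com/browse/42">#42</a>
#   #    and issues_data contains [{'id': '42', 'url': 'https://issues.example.com/browse/42'}]
#
# ${id}, ${repo}, ${repo_name}, ${group_name} and any named groups from the
# pattern are substituted into the url via string.Template.safe_substitute().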
1733 1733
1734 1734 def urlify_commit_message(commit_text, repository=None,
1735 1735 active_pattern_entries=None):
1736 1736 """
1737 1737 Parses given text message and makes proper links.
1738 1738 issues are linked to given issue-server, and rest is a commit link
1739 1739
1740 1740 :param commit_text:
1741 1741 :param repository:
1742 1742 """
1743 1743 def escaper(string):
1744 1744 return string.replace('<', '&lt;').replace('>', '&gt;')
1745 1745
1746 1746 newtext = escaper(commit_text)
1747 1747
1748 1748 # extract http/https links and make them real urls
1749 1749 newtext = urlify_text(newtext, safe=False)
1750 1750
1751 1751 # urlify commits - extract commit ids and make link out of them, if we have
1752 1752 # the scope of repository present.
1753 1753 if repository:
1754 1754 newtext = urlify_commits(newtext, repository)
1755 1755
1756 1756 # process issue tracker patterns
1757 1757 newtext, issues = process_patterns(newtext, repository or '',
1758 1758 active_entries=active_pattern_entries)
1759 1759
1760 1760 return literal(newtext)
1761 1761
1762 1762
1763 1763 def render_binary(repo_name, file_obj):
1764 1764 """
1765 1765 Choose how to render a binary file
1766 1766 """
1767 1767 filename = file_obj.name
1768 1768
1769 1769 # images
1770 1770 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1771 1771 if fnmatch.fnmatch(filename, pat=ext):
1772 1772 alt = filename
1773 1773 src = route_path(
1774 1774 'repo_file_raw', repo_name=repo_name,
1775 1775 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1776 1776 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1777 1777
1778 1778
1779 1779 def renderer_from_filename(filename, exclude=None):
1780 1780 """
1781 1781 choose a renderer based on filename, this works only for text based files
1782 1782 """
1783 1783
1784 1784 # ipython
1785 1785 for ext in ['*.ipynb']:
1786 1786 if fnmatch.fnmatch(filename, pat=ext):
1787 1787 return 'jupyter'
1788 1788
1789 1789 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1790 1790 if is_markup:
1791 1791 return is_markup
1792 1792 return None
1793 1793
1794 1794
1795 1795 def render(source, renderer='rst', mentions=False, relative_urls=None,
1796 1796 repo_name=None):
1797 1797
1798 1798 def maybe_convert_relative_links(html_source):
1799 1799 if relative_urls:
1800 1800 return relative_links(html_source, relative_urls)
1801 1801 return html_source
1802 1802
1803 1803 if renderer == 'rst':
1804 1804 if repo_name:
1805 1805 # process patterns on comments if we pass in repo name
1806 1806 source, issues = process_patterns(
1807 1807 source, repo_name, link_format='rst')
1808 1808
1809 1809 return literal(
1810 1810 '<div class="rst-block">%s</div>' %
1811 1811 maybe_convert_relative_links(
1812 1812 MarkupRenderer.rst(source, mentions=mentions)))
1813 1813 elif renderer == 'markdown':
1814 1814 if repo_name:
1815 1815 # process patterns on comments if we pass in repo name
1816 1816 source, issues = process_patterns(
1817 1817 source, repo_name, link_format='markdown')
1818 1818
1819 1819 return literal(
1820 1820 '<div class="markdown-block">%s</div>' %
1821 1821 maybe_convert_relative_links(
1822 1822 MarkupRenderer.markdown(source, flavored=True,
1823 1823 mentions=mentions)))
1824 1824 elif renderer == 'jupyter':
1825 1825 return literal(
1826 1826 '<div class="ipynb">%s</div>' %
1827 1827 maybe_convert_relative_links(
1828 1828 MarkupRenderer.jupyter(source)))
1829 1829
1830 1830 # None means just show the file-source
1831 1831 return None
1832 1832
1833 1833
1834 1834 def commit_status(repo, commit_id):
1835 1835 return ChangesetStatusModel().get_status(repo, commit_id)
1836 1836
1837 1837
1838 1838 def commit_status_lbl(commit_status):
1839 1839 return dict(ChangesetStatus.STATUSES).get(commit_status)
1840 1840
1841 1841
1842 1842 def commit_time(repo_name, commit_id):
1843 1843 repo = Repository.get_by_repo_name(repo_name)
1844 1844 commit = repo.get_commit(commit_id=commit_id)
1845 1845 return commit.date
1846 1846
1847 1847
1848 1848 def get_permission_name(key):
1849 1849 return dict(Permission.PERMS).get(key)
1850 1850
1851 1851
1852 1852 def journal_filter_help(request):
1853 1853 _ = request.translate
1854 1854
1855 1855 return _(
1856 1856 'Example filter terms:\n' +
1857 1857 ' repository:vcs\n' +
1858 1858 ' username:marcin\n' +
1859 1859 ' username:(NOT marcin)\n' +
1860 1860 ' action:*push*\n' +
1861 1861 ' ip:127.0.0.1\n' +
1862 1862 ' date:20120101\n' +
1863 1863 ' date:[20120101100000 TO 20120102]\n' +
1864 1864 '\n' +
1865 1865 'Generate wildcards using \'*\' character:\n' +
1866 1866 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1867 1867 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1868 1868 '\n' +
1869 1869 'Optional AND / OR operators in queries\n' +
1870 1870 ' "repository:vcs OR repository:test"\n' +
1871 1871 ' "username:test AND repository:test*"\n'
1872 1872 )
1873 1873
1874 1874
1875 1875 def search_filter_help(searcher, request):
1876 1876 _ = request.translate
1877 1877
1878 1878 terms = ''
1879 1879 return _(
1880 1880 'Example filter terms for `{searcher}` search:\n' +
1881 1881 '{terms}\n' +
1882 1882 'Generate wildcards using \'*\' character:\n' +
1883 1883 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1884 1884 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1885 1885 '\n' +
1886 1886 'Optional AND / OR operators in queries\n' +
1887 1887 ' "repo_name:vcs OR repo_name:test"\n' +
1888 1888 ' "owner:test AND repo_name:test*"\n' +
1889 1889 'More: {search_doc}'
1890 1890 ).format(searcher=searcher.name,
1891 1891 terms=terms, search_doc=searcher.query_lang_doc)
1892 1892
1893 1893
1894 1894 def not_mapped_error(repo_name):
1895 1895 from rhodecode.translation import _
1896 1896 flash(_('%s repository is not mapped to db perhaps'
1897 1897 ' it was created or renamed from the filesystem'
1898 1898 ' please run the application again'
1899 1899 ' in order to rescan repositories') % repo_name, category='error')
1900 1900
1901 1901
1902 1902 def ip_range(ip_addr):
1903 1903 from rhodecode.model.db import UserIpMap
1904 1904 s, e = UserIpMap._get_ip_range(ip_addr)
1905 1905 return '%s - %s' % (s, e)
1906 1906
1907 1907
1908 1908 def form(url, method='post', needs_csrf_token=True, **attrs):
1909 1909 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1910 1910 if method.lower() != 'get' and needs_csrf_token:
1911 1911 raise Exception(
1912 1912 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1913 1913 'CSRF token. If the endpoint does not require such token you can ' +
1914 1914 'explicitly set the parameter needs_csrf_token to false.')
1915 1915
1916 1916 return wh_form(url, method=method, **attrs)
1917 1917
1918 1918
1919 1919 def secure_form(form_url, method="POST", multipart=False, **attrs):
1920 1920 """Start a form tag that points the action to an url. This
1921 1921 form tag will also include the hidden field containing
1922 1922 the auth token.
1923 1923
1924 1924 The url options should be given either as a string, or as a
1925 1925 ``url()`` function. The method for the form defaults to POST.
1926 1926
1927 1927 Options:
1928 1928
1929 1929 ``multipart``
1930 1930 If set to True, the enctype is set to "multipart/form-data".
1931 1931 ``method``
1932 1932 The method to use when submitting the form, usually either
1933 1933 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1934 1934 hidden input with name _method is added to simulate the verb
1935 1935 over POST.
1936 1936
1937 1937 """
1938 1938 from webhelpers.pylonslib.secure_form import insecure_form
1939 1939
1940 1940 if 'request' in attrs:
1941 1941 session = attrs['request'].session
1942 1942 del attrs['request']
1943 1943 else:
1944 1944 raise ValueError(
1945 1945 'Calling this form requires request= to be passed as argument')
1946 1946
1947 1947 form = insecure_form(form_url, method, multipart, **attrs)
1948 1948 token = literal(
1949 1949 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1950 1950 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1951 1951
1952 1952 return literal("%s\n%s" % (form, token))
1953 1953
1954 1954
1955 1955 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1956 1956 select_html = select(name, selected, options, **attrs)
1957 1957 select2 = """
1958 1958 <script>
1959 1959 $(document).ready(function() {
1960 1960 $('#%s').select2({
1961 1961 containerCssClass: 'drop-menu',
1962 1962 dropdownCssClass: 'drop-menu-dropdown',
1963 1963 dropdownAutoWidth: true%s
1964 1964 });
1965 1965 });
1966 1966 </script>
1967 1967 """
1968 1968 filter_option = """,
1969 1969 minimumResultsForSearch: -1
1970 1970 """
1971 1971 input_id = attrs.get('id') or name
1972 1972 filter_enabled = "" if enable_filter else filter_option
1973 1973 select_script = literal(select2 % (input_id, filter_enabled))
1974 1974
1975 1975 return literal(select_html+select_script)
1976 1976
1977 1977
1978 1978 def get_visual_attr(tmpl_context_var, attr_name):
1979 1979 """
1980 1980 A safe way to get a variable from visual variable of template context
1981 1981
1982 1982 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1983 1983 :param attr_name: name of the attribute we fetch from the c.visual
1984 1984 """
1985 1985 visual = getattr(tmpl_context_var, 'visual', None)
1986 1986 if not visual:
1987 1987 return
1988 1988 else:
1989 1989 return getattr(visual, attr_name, None)
1990 1990
1991 1991
1992 1992 def get_last_path_part(file_node):
1993 1993 if not file_node.path:
1994 1994 return u''
1995 1995
1996 1996 path = safe_unicode(file_node.path.split('/')[-1])
1997 1997 return u'../' + path
1998 1998
1999 1999
2000 2000 def route_url(*args, **kwargs):
2001 2001 """
2002 2002 Wrapper around pyramids `route_url` (fully qualified url) function.
2003 2003 """
2004 2004 req = get_current_request()
2005 2005 return req.route_url(*args, **kwargs)
2006 2006
2007 2007
2008 2008 def route_path(*args, **kwargs):
2009 2009 """
2010 2010 Wrapper around pyramids `route_path` function.
2011 2011 """
2012 2012 req = get_current_request()
2013 2013 return req.route_path(*args, **kwargs)
2014 2014
2015 2015
2016 2016 def route_path_or_none(*args, **kwargs):
2017 2017 try:
2018 2018 return route_path(*args, **kwargs)
2019 2019 except KeyError:
2020 2020 return None
2021 2021
2022 2022
2023 2023 def current_route_path(request, **kw):
2024 2024 new_args = request.GET.mixed()
2025 2025 new_args.update(kw)
2026 2026 return request.current_route_path(_query=new_args)
2027 2027
2028 2028
2029 2029 def api_call_example(method, args):
2030 2030 """
2031 2031 Generates an API call example via CURL
2032 2032 """
2033 2033 args_json = json.dumps(OrderedDict([
2034 2034 ('id', 1),
2035 2035 ('auth_token', 'SECRET'),
2036 2036 ('method', method),
2037 2037 ('args', args)
2038 2038 ]))
2039 2039 return literal(
2040 2040 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2041 2041 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2042 2042 "and needs to be of `api calls` role."
2043 2043 .format(
2044 2044 api_url=route_url('apiv2'),
2045 2045 token_url=route_url('my_account_auth_tokens'),
2046 2046 data=args_json))
2047 2047
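# For example, api_call_example('get_repo', {'repoid': 'vcs'}) renders roughly:
#
#   curl <apiv2 url> -X POST -H 'content-type:text/plain' \
#     --data-binary '{"id": 1, "auth_token": "SECRET", "method": "get_repo", "args": {"repoid": "vcs"}}'
#
# followed by a note pointing at the auth-tokens page; the method name and
# args shown here are only an illustration.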
2048 2048
2049 2049 def notification_description(notification, request):
2050 2050 """
2051 2051 Generate notification human readable description based on notification type
2052 2052 """
2053 2053 from rhodecode.model.notification import NotificationModel
2054 2054 return NotificationModel().make_description(
2055 2055 notification, translate=request.translate)
2056 2056
2057 2057
2058 2058 def go_import_header(request, db_repo=None):
2059 2059 """
2060 2060 Creates a header for go-import functionality in Go Lang
2061 2061 """
2062 2062
2063 2063 if not db_repo:
2064 2064 return
2065 2065 if 'go-get' not in request.GET:
2066 2066 return
2067 2067
2068 2068 clone_url = db_repo.clone_url()
2069 2069 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2070 2070 # we have a repo and go-get flag,
2071 2071 return literal('<meta name="go-import" content="{} {} {}">'.format(
2072 2072 prefix, db_repo.repo_type, clone_url))
2073
2074
2075 def reviewer_as_json(*args, **kwargs):
2076 from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
2077 return _reviewer_as_json(*args, **kwargs)
@@ -1,267 +1,393 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import itertools
23 23 import logging
24 from collections import defaultdict
24 import collections
25 25
26 26 from rhodecode.model import BaseModel
27 27 from rhodecode.model.db import (
28 28 ChangesetStatus, ChangesetComment, PullRequest, Session)
29 29 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
30 30 from rhodecode.lib.markup_renderer import (
31 31 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 class ChangesetStatusModel(BaseModel):
37 37
38 38 cls = ChangesetStatus
39 39
40 40 def __get_changeset_status(self, changeset_status):
41 41 return self._get_instance(ChangesetStatus, changeset_status)
42 42
43 43 def __get_pull_request(self, pull_request):
44 44 return self._get_instance(PullRequest, pull_request)
45 45
46 46 def _get_status_query(self, repo, revision, pull_request,
47 47 with_revisions=False):
48 48 repo = self._get_repo(repo)
49 49
50 50 q = ChangesetStatus.query()\
51 51 .filter(ChangesetStatus.repo == repo)
52 52 if not with_revisions:
53 53 q = q.filter(ChangesetStatus.version == 0)
54 54
55 55 if revision:
56 56 q = q.filter(ChangesetStatus.revision == revision)
57 57 elif pull_request:
58 58 pull_request = self.__get_pull_request(pull_request)
59 59 # TODO: johbo: Think about the impact of this join, there must
60 60 # be a reason why ChangesetStatus and ChanagesetComment is linked
61 61 # to the pull request. Might be that we want to do the same for
62 62 # the pull_request_version_id.
63 63 q = q.join(ChangesetComment).filter(
64 64 ChangesetStatus.pull_request == pull_request,
65 65 ChangesetComment.pull_request_version_id == None)
66 66 else:
67 67 raise Exception('Please specify revision or pull_request')
68 68 q = q.order_by(ChangesetStatus.version.asc())
69 69 return q
70 70
71 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
72 trim_votes=True):
73 """
74     Calculate status based on the given group members and the voting rule
75
76
77 group1 - 4 members, 3 required for approval
78 user1 - approved
79 user2 - reject
80 user3 - approved
81 user4 - rejected
82
83     final_state: rejected, reason: fewer than 3 approvals
84
85
86 group1 - 4 members, 2 required for approval
87 user1 - approved
88 user2 - reject
89 user3 - approved
90 user4 - rejected
91
92     final_state: approved, reason: at least 2 approvals received
93
94 group1 - 4 members, ALL required for approval
95 user1 - approved
96 user2 - reject
97 user3 - approved
98 user4 - rejected
99
100     final_state: rejected, reason: not all members approved
101
102
103 group1 - 4 members, ALL required for approval
104 user1 - approved
105 user2 - approved
106 user3 - approved
107 user4 - approved
108
109 final_state: approved, reason all approvals received
110
111 group1 - 4 members, 5 required for approval
112     (required approvals should be trimmed to the number of actual members)
113
114 user1 - approved
115 user2 - approved
116 user3 - approved
117 user4 - approved
118
119 final_state: approved, reason all approvals received
120
121 """
122 group_vote_data = {}
123 got_rule = False
124 members = collections.OrderedDict()
125 for review_obj, user, reasons, mandatory, statuses \
126 in group_statuses_by_reviewers:
127
128 if not got_rule:
129 group_vote_data = review_obj.rule_user_group_data()
130 got_rule = bool(group_vote_data)
131
132 members[user.user_id] = statuses
133
134 if not group_vote_data:
135 return []
136
137 required_votes = group_vote_data['vote_rule']
138 if required_votes == -1:
139 # -1 means all required, so we replace it with how many people
140 # are in the members
141 required_votes = len(members)
142
143 if trim_votes and required_votes > len(members):
144 # we require more votes than we have members in the group
145 # in this case we trim the required votes to the number of members
146 required_votes = len(members)
147
148 approvals = sum([
149 1 for statuses in members.values()
150 if statuses and
151 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
152
153 calculated_votes = []
154 # we have all votes from users, now check if we have enough votes
155         # to fill in the votes of the remaining members
156 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
157 if approvals >= required_votes:
158 fill_in = ChangesetStatus.STATUS_APPROVED
159
160 for member, statuses in members.items():
161 if statuses:
162 ver, latest = statuses[0]
163 if fill_in == ChangesetStatus.STATUS_APPROVED:
164 calculated_votes.append(fill_in)
165 else:
166 calculated_votes.append(latest.status)
167 else:
168 calculated_votes.append(fill_in)
169
170 return calculated_votes
171
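# Worked example of the group rule above: a 4 member group with vote_rule=2
# (any two approvals approve on behalf of the whole group):
#
#   member votes:  approved, rejected, approved, <no vote yet>
#   approvals = 2 >= required_votes = 2  -> fill_in = STATUS_APPROVED
#   calculated_votes = [approved, approved, approved, approved]
#
# With vote_rule=-1 (ALL members required) the same votes give
# approvals = 2 < required_votes = 4, so each member keeps their own latest
# status, the non-voter counts as under_review, and the group does not
# reach approval.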
71 172 def calculate_status(self, statuses_by_reviewers):
72 173 """
73 174 Given the approval statuses from reviewers, calculates final approval
74 175         status. There can only be 3 results: all approved, all rejected, or,
75 176         if there is no consensus, the PR is under review.
76 177
77 178 :param statuses_by_reviewers:
78 179 """
79 votes = defaultdict(int)
180
181 def group_rule(element):
182 review_obj = element[0]
183 rule_data = review_obj.rule_user_group_data()
184 if rule_data and rule_data['id']:
185 return rule_data['id']
186
187 voting_groups = itertools.groupby(
188 sorted(statuses_by_reviewers, key=group_rule), group_rule)
189
190 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
191
80 192 reviewers_number = len(statuses_by_reviewers)
81 for user, reasons, mandatory, statuses in statuses_by_reviewers:
193 votes = collections.defaultdict(int)
194 for group, group_statuses_by_reviewers in voting_by_groups:
195 if group:
196 # calculate how the "group" voted
197 for vote_status in self.calculate_group_vote(
198 group, group_statuses_by_reviewers):
199 votes[vote_status] += 1
200 else:
201
202 for review_obj, user, reasons, mandatory, statuses \
203 in group_statuses_by_reviewers:
204 # individual vote
82 205 if statuses:
83 206 ver, latest = statuses[0]
84 207 votes[latest.status] += 1
85 else:
86 votes[ChangesetStatus.DEFAULT] += 1
208
209 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
210 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
87 211
88 # all approved
89 if votes.get(ChangesetStatus.STATUS_APPROVED) == reviewers_number:
212 # TODO(marcink): with group voting, how does rejected work,
213 # do we ever get rejected state ?
214
215 if approved_votes_count == reviewers_number:
90 216 return ChangesetStatus.STATUS_APPROVED
91 217
92 # all rejected
93 if votes.get(ChangesetStatus.STATUS_REJECTED) == reviewers_number:
218 if rejected_votes_count == reviewers_number:
94 219 return ChangesetStatus.STATUS_REJECTED
95 220
96 221 return ChangesetStatus.STATUS_UNDER_REVIEW
97 222
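# Worked example of the final calculation: 5 reviewers in total, a 4 member
# group with vote_rule=2 plus one individual reviewer.
#
#   group members:  approved, rejected, approved, <no vote yet>
#   individual:     approved
#
# calculate_group_vote() turns the group into 4 approved votes (2 of the 2
# required approvals were reached), the individual adds one more, so
# approved_votes_count == reviewers_number == 5 and the result is
# STATUS_APPROVED. If the individual had not voted, the counts would not
# match and the result would stay STATUS_UNDER_REVIEW.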
98 223 def get_statuses(self, repo, revision=None, pull_request=None,
99 224 with_revisions=False):
100 225 q = self._get_status_query(repo, revision, pull_request,
101 226 with_revisions)
102 227 return q.all()
103 228
104 229 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
105 230 """
106 231 Returns latest status of changeset for given revision or for given
107 232 pull request. Statuses are versioned inside a table itself and
108 233 version == 0 is always the current one
109 234
110 235 :param repo:
111 236 :param revision: 40char hash or None
112 237 :param pull_request: pull_request reference
113 238 :param as_str: return status as string not object
114 239 """
115 240 q = self._get_status_query(repo, revision, pull_request)
116 241
117 242 # need to use first here since there can be multiple statuses
118 243 # returned from pull_request
119 244 status = q.first()
120 245 if as_str:
121 246 status = status.status if status else status
122 247 st = status or ChangesetStatus.DEFAULT
123 248 return str(st)
124 249 return status
125 250
126 251 def _render_auto_status_message(
127 252 self, status, commit_id=None, pull_request=None):
128 253 """
129 254 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
130 255 so it's always looking the same disregarding on which default
131 256 renderer system is using.
132 257
133 258 :param status: status text to change into
134 259 :param commit_id: the commit_id we change the status for
135 260 :param pull_request: the pull request we change the status for
136 261 """
137 262
138 263 new_status = ChangesetStatus.get_status_lbl(status)
139 264
140 265 params = {
141 266 'new_status_label': new_status,
142 267 'pull_request': pull_request,
143 268 'commit_id': commit_id,
144 269 }
145 270 renderer = RstTemplateRenderer()
146 271 return renderer.render('auto_status_change.mako', **params)
147 272
148 273 def set_status(self, repo, status, user, comment=None, revision=None,
149 274 pull_request=None, dont_allow_on_closed_pull_request=False):
150 275 """
151 276 Creates a new status for a changeset or updates the old ones, bumping their
152 277 version and leaving the current status at version 0
153 278
154 279 :param repo:
155 280 :param revision:
156 281 :param status:
157 282 :param user:
158 283 :param comment:
159 284 :param dont_allow_on_closed_pull_request: don't allow a status change
160 285 if the last status was for a pull request and it's closed. We shouldn't
161 286 mess around with this manually
162 287 """
163 288 repo = self._get_repo(repo)
164 289
165 290 q = ChangesetStatus.query()
166 291
167 292 if revision:
168 293 q = q.filter(ChangesetStatus.repo == repo)
169 294 q = q.filter(ChangesetStatus.revision == revision)
170 295 elif pull_request:
171 296 pull_request = self.__get_pull_request(pull_request)
172 297 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
173 298 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
174 299 cur_statuses = q.all()
175 300
176 301 # if statuses exist and the last one is associated with a closed pull request
177 302 # we need to check if we can allow this status change
178 303 if (dont_allow_on_closed_pull_request and cur_statuses
179 304 and getattr(cur_statuses[0].pull_request, 'status', '')
180 305 == PullRequest.STATUS_CLOSED):
181 306 raise StatusChangeOnClosedPullRequestError(
182 307 'Changing status on closed pull request is not allowed'
183 308 )
184 309
185 310 # update all current statuses with older version
186 311 if cur_statuses:
187 312 for st in cur_statuses:
188 313 st.version += 1
189 314 Session().add(st)
190 315
191 316 def _create_status(user, repo, status, comment, revision, pull_request):
192 317 new_status = ChangesetStatus()
193 318 new_status.author = self._get_user(user)
194 319 new_status.repo = self._get_repo(repo)
195 320 new_status.status = status
196 321 new_status.comment = comment
197 322 new_status.revision = revision
198 323 new_status.pull_request = pull_request
199 324 return new_status
200 325
201 326 if not comment:
202 327 from rhodecode.model.comment import CommentsModel
203 328 comment = CommentsModel().create(
204 329 text=self._render_auto_status_message(
205 330 status, commit_id=revision, pull_request=pull_request),
206 331 repo=repo,
207 332 user=user,
208 333 pull_request=pull_request,
209 334 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
210 335 )
211 336
212 337 if revision:
213 338 new_status = _create_status(
214 339 user=user, repo=repo, status=status, comment=comment,
215 340 revision=revision, pull_request=pull_request)
216 341 Session().add(new_status)
217 342 return new_status
218 343 elif pull_request:
219 344 # pull request can have more than one revision associated with it
220 345 # we need to create new version for each one
221 346 new_statuses = []
222 347 repo = pull_request.source_repo
223 348 for rev in pull_request.revisions:
224 349 new_status = _create_status(
225 350 user=user, repo=repo, status=status, comment=comment,
226 351 revision=rev, pull_request=pull_request)
227 352 new_statuses.append(new_status)
228 353 Session().add(new_status)
229 354 return new_statuses
230 355
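Note (not part of the change): set_status keeps version 0 as the current entry by bumping every existing status before inserting the new one. A minimal standalone sketch of that bookkeeping, using hypothetical plain dicts rather than ChangesetStatus rows:

def add_status(history, new_status):
    # history: hypothetical list of dicts like {'status': ..., 'version': 0},
    # newest first; mirrors the bump-then-insert bookkeeping of set_status
    for entry in history:
        entry['version'] += 1
    history.insert(0, {'status': new_status, 'version': 0})
    return history

# version 0 always points at the most recent status
h = add_status([], 'under_review')
h = add_status(h, 'approved')
assert h[0] == {'status': 'approved', 'version': 0}
assert h[1] == {'status': 'under_review', 'version': 1}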
231 356 def reviewers_statuses(self, pull_request):
232 357 _commit_statuses = self.get_statuses(
233 358 pull_request.source_repo,
234 359 pull_request=pull_request,
235 360 with_revisions=True)
236 361
237 commit_statuses = defaultdict(list)
362 commit_statuses = collections.defaultdict(list)
238 363 for st in _commit_statuses:
239 364 commit_statuses[st.author.username] += [st]
240 365
241 366 pull_request_reviewers = []
242 367
243 368 def version(commit_status):
244 369 return commit_status.version
245 370
246 for o in pull_request.reviewers:
247 if not o.user:
371 for obj in pull_request.reviewers:
372 if not obj.user:
248 373 continue
249 statuses = commit_statuses.get(o.user.username, None)
374 statuses = commit_statuses.get(obj.user.username, None)
250 375 if statuses:
251 statuses = [(x, list(y)[0])
252 for x, y in (itertools.groupby(
253 sorted(statuses, key=version),version))]
376 status_groups = itertools.groupby(
377 sorted(statuses, key=version), version)
378 statuses = [(x, list(y)[0]) for x, y in status_groups]
254 379
255 380 pull_request_reviewers.append(
256 (o.user, o.reasons, o.mandatory, statuses))
381 (obj, obj.user, obj.reasons, obj.mandatory, statuses))
382
257 383 return pull_request_reviewers
258 384
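Note (not part of the change): reviewers_statuses picks the latest status per version by sorting and grouping a reviewer's statuses. A toy equivalent with hypothetical dicts standing in for ChangesetStatus objects:

import itertools

def latest_status_per_version(statuses):
    # statuses: hypothetical dicts like {'version': 0, 'status': 'approved'}
    # for a single reviewer; mirrors the groupby-by-version trick above
    def version(status):
        return status['version']
    grouped = itertools.groupby(sorted(statuses, key=version), version)
    return [(ver, list(group)[0]) for ver, group in grouped]

# after sorting, version 0 (the current status) comes first
history = [{'version': 1, 'status': 'under_review'},
           {'version': 0, 'status': 'approved'}]
assert latest_status_per_version(history)[0][1]['status'] == 'approved'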
259 385 def calculated_review_status(self, pull_request, reviewers_statuses=None):
260 386 """
261 387 calculate pull request status based on reviewers, which should be a list
262 388 of 5-element tuples: (review_obj, user, reasons, mandatory, statuses).
263 389
264 390 :param reviewers_statuses:
265 391 """
266 392 reviewers = reviewers_statuses or self.reviewers_statuses(pull_request)
267 393 return self.calculate_status(reviewers)
@@ -1,4402 +1,4452 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # noqa
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from beaker.cache import cache_region
51 51 from zope.cachedescriptors.property import Lazy as LazyProperty
52 52
53 53 from pyramid.threadlocal import get_current_request
54 54
55 55 from rhodecode.translation import _
56 56 from rhodecode.lib.vcs import get_vcs_instance
57 57 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
58 58 from rhodecode.lib.utils2 import (
59 59 str2bool, safe_str, get_commit_safe, safe_unicode, md5_safe,
60 60 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
61 61 glob2re, StrictAttributeDict, cleaned_uri)
62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
63 JsonRaw
62 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType
64 63 from rhodecode.lib.ext_json import json
65 64 from rhodecode.lib.caching_query import FromCache
66 65 from rhodecode.lib.encrypt import AESCipher
67 66
68 67 from rhodecode.model.meta import Base, Session
69 68
70 69 URL_SEP = '/'
71 70 log = logging.getLogger(__name__)
72 71
73 72 # =============================================================================
74 73 # BASE CLASSES
75 74 # =============================================================================
76 75
77 76 # this is propagated from .ini file rhodecode.encrypted_values.secret or
78 77 # beaker.session.secret if first is not set.
79 78 # and initialized at environment.py
80 79 ENCRYPTION_KEY = None
81 80
82 81 # used to sort permissions by types, '#' used here is not allowed to be in
83 82 # usernames, and it's very early in sorted string.printable table.
84 83 PERMISSION_TYPE_SORT = {
85 84 'admin': '####',
86 85 'write': '###',
87 86 'read': '##',
88 87 'none': '#',
89 88 }
90 89
91 90
92 91 def display_user_sort(obj):
93 92 """
94 93 Sort function used to sort permissions in .permissions() function of
95 94 Repository, RepoGroup, UserGroup. Also it puts the default user in front
96 95 of all other resources
97 96 """
98 97
99 98 if obj.username == User.DEFAULT_USER:
100 99 return '#####'
101 100 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
102 101 return prefix + obj.username
103 102
104 103
105 104 def display_user_group_sort(obj):
106 105 """
107 106 Sort function used to sort permissions in .permissions() function of
108 107 Repository, RepoGroup, UserGroup. Also it puts the default user in front
109 108 of all other resources
110 109 """
111 110
112 111 prefix = PERMISSION_TYPE_SORT.get(obj.permission.split('.')[-1], '')
113 112 return prefix + obj.users_group_name
114 113
115 114
116 115 def _hash_key(k):
117 116 return md5_safe(k)
118 117
119 118
120 119 def in_filter_generator(qry, items, limit=500):
121 120 """
122 121 Splits IN() into multiple with OR
123 122 e.g.::
124 123 cnt = Repository.query().filter(
125 124 or_(
126 125 *in_filter_generator(Repository.repo_id, range(100000))
127 126 )).count()
128 127 """
129 128 if not items:
130 129 # empty list will cause empty query which might cause security issues
131 130 # this can lead to hidden unpleasant results
132 131 items = [-1]
133 132
134 133 parts = []
135 134 for chunk in xrange(0, len(items), limit):
136 135 parts.append(
137 136 qry.in_(items[chunk: chunk + limit])
138 137 )
139 138
140 139 return parts
141 140
142 141
143 142 class EncryptedTextValue(TypeDecorator):
144 143 """
145 144 Special column for encrypted long text data, use like::
146 145
147 146 value = Column("encrypted_value", EncryptedValue(), nullable=False)
148 147
149 148 This column is intelligent so if the value is in unencrypted form it returns
150 149 the unencrypted form, but on save it always encrypts
151 150 """
152 151 impl = Text
153 152
154 153 def process_bind_param(self, value, dialect):
155 154 if not value:
156 155 return value
157 156 if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
158 157 # protect against double encrypting if someone manually starts
159 158 # doing it
160 159 raise ValueError('value needs to be in unencrypted format, ie. '
161 160 'not starting with enc$aes')
162 161 return 'enc$aes_hmac$%s' % AESCipher(
163 162 ENCRYPTION_KEY, hmac=True).encrypt(value)
164 163
165 164 def process_result_value(self, value, dialect):
166 165 import rhodecode
167 166
168 167 if not value:
169 168 return value
170 169
171 170 parts = value.split('$', 3)
172 171 if not len(parts) == 3:
173 172 # probably not encrypted values
174 173 return value
175 174 else:
176 175 if parts[0] != 'enc':
177 176 # parts ok but without our header ?
178 177 return value
179 178 enc_strict_mode = str2bool(rhodecode.CONFIG.get(
180 179 'rhodecode.encrypted_values.strict') or True)
181 180 # at that stage we know it's our encryption
182 181 if parts[1] == 'aes':
183 182 decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
184 183 elif parts[1] == 'aes_hmac':
185 184 decrypted_data = AESCipher(
186 185 ENCRYPTION_KEY, hmac=True,
187 186 strict_verification=enc_strict_mode).decrypt(parts[2])
188 187 else:
189 188 raise ValueError(
190 189 'Encryption type part is wrong, must be `aes` '
191 190 'or `aes_hmac`, got `%s` instead' % (parts[1]))
192 191 return decrypted_data
193 192
194 193
195 194 class BaseModel(object):
196 195 """
197 196 Base Model for all classes
198 197 """
199 198
200 199 @classmethod
201 200 def _get_keys(cls):
202 201 """return column names for this model """
203 202 return class_mapper(cls).c.keys()
204 203
205 204 def get_dict(self):
206 205 """
207 206 return dict with keys and values corresponding
208 207 to this model data """
209 208
210 209 d = {}
211 210 for k in self._get_keys():
212 211 d[k] = getattr(self, k)
213 212
214 213 # also use __json__() if present to get additional fields
215 214 _json_attr = getattr(self, '__json__', None)
216 215 if _json_attr:
217 216 # update with attributes from __json__
218 217 if callable(_json_attr):
219 218 _json_attr = _json_attr()
220 219 for k, val in _json_attr.iteritems():
221 220 d[k] = val
222 221 return d
223 222
224 223 def get_appstruct(self):
225 224 """return list with keys and values tuples corresponding
226 225 to this model data """
227 226
228 227 lst = []
229 228 for k in self._get_keys():
230 229 lst.append((k, getattr(self, k),))
231 230 return lst
232 231
233 232 def populate_obj(self, populate_dict):
234 233 """populate model with data from given populate_dict"""
235 234
236 235 for k in self._get_keys():
237 236 if k in populate_dict:
238 237 setattr(self, k, populate_dict[k])
239 238
240 239 @classmethod
241 240 def query(cls):
242 241 return Session().query(cls)
243 242
244 243 @classmethod
245 244 def get(cls, id_):
246 245 if id_:
247 246 return cls.query().get(id_)
248 247
249 248 @classmethod
250 249 def get_or_404(cls, id_):
251 250 from pyramid.httpexceptions import HTTPNotFound
252 251
253 252 try:
254 253 id_ = int(id_)
255 254 except (TypeError, ValueError):
256 255 raise HTTPNotFound()
257 256
258 257 res = cls.query().get(id_)
259 258 if not res:
260 259 raise HTTPNotFound()
261 260 return res
262 261
263 262 @classmethod
264 263 def getAll(cls):
265 264 # deprecated and left for backward compatibility
266 265 return cls.get_all()
267 266
268 267 @classmethod
269 268 def get_all(cls):
270 269 return cls.query().all()
271 270
272 271 @classmethod
273 272 def delete(cls, id_):
274 273 obj = cls.query().get(id_)
275 274 Session().delete(obj)
276 275
277 276 @classmethod
278 277 def identity_cache(cls, session, attr_name, value):
279 278 exist_in_session = []
280 279 for (item_cls, pkey), instance in session.identity_map.items():
281 280 if cls == item_cls and getattr(instance, attr_name) == value:
282 281 exist_in_session.append(instance)
283 282 if exist_in_session:
284 283 if len(exist_in_session) == 1:
285 284 return exist_in_session[0]
286 285 log.exception(
287 286 'multiple objects with attr %s and '
288 287 'value %s found with same name: %r',
289 288 attr_name, value, exist_in_session)
290 289
291 290 def __repr__(self):
292 291 if hasattr(self, '__unicode__'):
293 292 # python repr needs to return str
294 293 try:
295 294 return safe_str(self.__unicode__())
296 295 except UnicodeDecodeError:
297 296 pass
298 297 return '<DB:%s>' % (self.__class__.__name__)
299 298
300 299
301 300 class RhodeCodeSetting(Base, BaseModel):
302 301 __tablename__ = 'rhodecode_settings'
303 302 __table_args__ = (
304 303 UniqueConstraint('app_settings_name'),
305 304 {'extend_existing': True, 'mysql_engine': 'InnoDB',
306 305 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
307 306 )
308 307
309 308 SETTINGS_TYPES = {
310 309 'str': safe_str,
311 310 'int': safe_int,
312 311 'unicode': safe_unicode,
313 312 'bool': str2bool,
314 313 'list': functools.partial(aslist, sep=',')
315 314 }
316 315 DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
317 316 GLOBAL_CONF_KEY = 'app_settings'
318 317
319 318 app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
320 319 app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
321 320 _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
322 321 _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)
323 322
324 323 def __init__(self, key='', val='', type='unicode'):
325 324 self.app_settings_name = key
326 325 self.app_settings_type = type
327 326 self.app_settings_value = val
328 327
329 328 @validates('_app_settings_value')
330 329 def validate_settings_value(self, key, val):
331 330 assert type(val) == unicode
332 331 return val
333 332
334 333 @hybrid_property
335 334 def app_settings_value(self):
336 335 v = self._app_settings_value
337 336 _type = self.app_settings_type
338 337 if _type:
339 338 _type = self.app_settings_type.split('.')[0]
340 339 # decode the encrypted value
341 340 if 'encrypted' in self.app_settings_type:
342 341 cipher = EncryptedTextValue()
343 342 v = safe_unicode(cipher.process_result_value(v, None))
344 343
345 344 converter = self.SETTINGS_TYPES.get(_type) or \
346 345 self.SETTINGS_TYPES['unicode']
347 346 return converter(v)
348 347
349 348 @app_settings_value.setter
350 349 def app_settings_value(self, val):
351 350 """
352 351 Setter that will always make sure we use unicode in app_settings_value
353 352
354 353 :param val:
355 354 """
356 355 val = safe_unicode(val)
357 356 # encode the encrypted value
358 357 if 'encrypted' in self.app_settings_type:
359 358 cipher = EncryptedTextValue()
360 359 val = safe_unicode(cipher.process_bind_param(val, None))
361 360 self._app_settings_value = val
362 361
363 362 @hybrid_property
364 363 def app_settings_type(self):
365 364 return self._app_settings_type
366 365
367 366 @app_settings_type.setter
368 367 def app_settings_type(self, val):
369 368 if val.split('.')[0] not in self.SETTINGS_TYPES:
370 369 raise Exception('type must be one of %s got %s'
371 370 % (self.SETTINGS_TYPES.keys(), val))
372 371 self._app_settings_type = val
373 372
374 373 def __unicode__(self):
375 374 return u"<%s('%s:%s[%s]')>" % (
376 375 self.__class__.__name__,
377 376 self.app_settings_name, self.app_settings_value,
378 377 self.app_settings_type
379 378 )
380 379
381 380
382 381 class RhodeCodeUi(Base, BaseModel):
383 382 __tablename__ = 'rhodecode_ui'
384 383 __table_args__ = (
385 384 UniqueConstraint('ui_key'),
386 385 {'extend_existing': True, 'mysql_engine': 'InnoDB',
387 386 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
388 387 )
389 388
390 389 HOOK_REPO_SIZE = 'changegroup.repo_size'
391 390 # HG
392 391 HOOK_PRE_PULL = 'preoutgoing.pre_pull'
393 392 HOOK_PULL = 'outgoing.pull_logger'
394 393 HOOK_PRE_PUSH = 'prechangegroup.pre_push'
395 394 HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
396 395 HOOK_PUSH = 'changegroup.push_logger'
397 396 HOOK_PUSH_KEY = 'pushkey.key_push'
398 397
399 398 # TODO: johbo: Unify way how hooks are configured for git and hg,
400 399 # git part is currently hardcoded.
401 400
402 401 # SVN PATTERNS
403 402 SVN_BRANCH_ID = 'vcs_svn_branch'
404 403 SVN_TAG_ID = 'vcs_svn_tag'
405 404
406 405 ui_id = Column(
407 406 "ui_id", Integer(), nullable=False, unique=True, default=None,
408 407 primary_key=True)
409 408 ui_section = Column(
410 409 "ui_section", String(255), nullable=True, unique=None, default=None)
411 410 ui_key = Column(
412 411 "ui_key", String(255), nullable=True, unique=None, default=None)
413 412 ui_value = Column(
414 413 "ui_value", String(255), nullable=True, unique=None, default=None)
415 414 ui_active = Column(
416 415 "ui_active", Boolean(), nullable=True, unique=None, default=True)
417 416
418 417 def __repr__(self):
419 418 return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
420 419 self.ui_key, self.ui_value)
421 420
422 421
423 422 class RepoRhodeCodeSetting(Base, BaseModel):
424 423 __tablename__ = 'repo_rhodecode_settings'
425 424 __table_args__ = (
426 425 UniqueConstraint(
427 426 'app_settings_name', 'repository_id',
428 427 name='uq_repo_rhodecode_setting_name_repo_id'),
429 428 {'extend_existing': True, 'mysql_engine': 'InnoDB',
430 429 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
431 430 )
432 431
433 432 repository_id = Column(
434 433 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
435 434 nullable=False)
436 435 app_settings_id = Column(
437 436 "app_settings_id", Integer(), nullable=False, unique=True,
438 437 default=None, primary_key=True)
439 438 app_settings_name = Column(
440 439 "app_settings_name", String(255), nullable=True, unique=None,
441 440 default=None)
442 441 _app_settings_value = Column(
443 442 "app_settings_value", String(4096), nullable=True, unique=None,
444 443 default=None)
445 444 _app_settings_type = Column(
446 445 "app_settings_type", String(255), nullable=True, unique=None,
447 446 default=None)
448 447
449 448 repository = relationship('Repository')
450 449
451 450 def __init__(self, repository_id, key='', val='', type='unicode'):
452 451 self.repository_id = repository_id
453 452 self.app_settings_name = key
454 453 self.app_settings_type = type
455 454 self.app_settings_value = val
456 455
457 456 @validates('_app_settings_value')
458 457 def validate_settings_value(self, key, val):
459 458 assert type(val) == unicode
460 459 return val
461 460
462 461 @hybrid_property
463 462 def app_settings_value(self):
464 463 v = self._app_settings_value
465 464 type_ = self.app_settings_type
466 465 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
467 466 converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
468 467 return converter(v)
469 468
470 469 @app_settings_value.setter
471 470 def app_settings_value(self, val):
472 471 """
473 472 Setter that will always make sure we use unicode in app_settings_value
474 473
475 474 :param val:
476 475 """
477 476 self._app_settings_value = safe_unicode(val)
478 477
479 478 @hybrid_property
480 479 def app_settings_type(self):
481 480 return self._app_settings_type
482 481
483 482 @app_settings_type.setter
484 483 def app_settings_type(self, val):
485 484 SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
486 485 if val not in SETTINGS_TYPES:
487 486 raise Exception('type must be one of %s got %s'
488 487 % (SETTINGS_TYPES.keys(), val))
489 488 self._app_settings_type = val
490 489
491 490 def __unicode__(self):
492 491 return u"<%s('%s:%s:%s[%s]')>" % (
493 492 self.__class__.__name__, self.repository.repo_name,
494 493 self.app_settings_name, self.app_settings_value,
495 494 self.app_settings_type
496 495 )
497 496
498 497
499 498 class RepoRhodeCodeUi(Base, BaseModel):
500 499 __tablename__ = 'repo_rhodecode_ui'
501 500 __table_args__ = (
502 501 UniqueConstraint(
503 502 'repository_id', 'ui_section', 'ui_key',
504 503 name='uq_repo_rhodecode_ui_repository_id_section_key'),
505 504 {'extend_existing': True, 'mysql_engine': 'InnoDB',
506 505 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
507 506 )
508 507
509 508 repository_id = Column(
510 509 "repository_id", Integer(), ForeignKey('repositories.repo_id'),
511 510 nullable=False)
512 511 ui_id = Column(
513 512 "ui_id", Integer(), nullable=False, unique=True, default=None,
514 513 primary_key=True)
515 514 ui_section = Column(
516 515 "ui_section", String(255), nullable=True, unique=None, default=None)
517 516 ui_key = Column(
518 517 "ui_key", String(255), nullable=True, unique=None, default=None)
519 518 ui_value = Column(
520 519 "ui_value", String(255), nullable=True, unique=None, default=None)
521 520 ui_active = Column(
522 521 "ui_active", Boolean(), nullable=True, unique=None, default=True)
523 522
524 523 repository = relationship('Repository')
525 524
526 525 def __repr__(self):
527 526 return '<%s[%s:%s]%s=>%s]>' % (
528 527 self.__class__.__name__, self.repository.repo_name,
529 528 self.ui_section, self.ui_key, self.ui_value)
530 529
531 530
532 531 class User(Base, BaseModel):
533 532 __tablename__ = 'users'
534 533 __table_args__ = (
535 534 UniqueConstraint('username'), UniqueConstraint('email'),
536 535 Index('u_username_idx', 'username'),
537 536 Index('u_email_idx', 'email'),
538 537 {'extend_existing': True, 'mysql_engine': 'InnoDB',
539 538 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
540 539 )
541 540 DEFAULT_USER = 'default'
542 541 DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
543 542 DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
544 543
545 544 user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
546 545 username = Column("username", String(255), nullable=True, unique=None, default=None)
547 546 password = Column("password", String(255), nullable=True, unique=None, default=None)
548 547 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
549 548 admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
550 549 name = Column("firstname", String(255), nullable=True, unique=None, default=None)
551 550 lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
552 551 _email = Column("email", String(255), nullable=True, unique=None, default=None)
553 552 last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
554 553 last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)
555 554
556 555 extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
557 556 extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
558 557 _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
559 558 inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
560 559 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
561 560 _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data
562 561
563 562 user_log = relationship('UserLog')
564 563 user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')
565 564
566 565 repositories = relationship('Repository')
567 566 repository_groups = relationship('RepoGroup')
568 567 user_groups = relationship('UserGroup')
569 568
570 569 user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
571 570 followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')
572 571
573 572 repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
574 573 repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
575 574 user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')
576 575
577 576 group_member = relationship('UserGroupMember', cascade='all')
578 577
579 578 notifications = relationship('UserNotification', cascade='all')
580 579 # notifications assigned to this user
581 580 user_created_notifications = relationship('Notification', cascade='all')
582 581 # comments created by this user
583 582 user_comments = relationship('ChangesetComment', cascade='all')
584 583 # user profile extra info
585 584 user_emails = relationship('UserEmailMap', cascade='all')
586 585 user_ip_map = relationship('UserIpMap', cascade='all')
587 586 user_auth_tokens = relationship('UserApiKeys', cascade='all')
588 587 user_ssh_keys = relationship('UserSshKeys', cascade='all')
589 588
590 589 # gists
591 590 user_gists = relationship('Gist', cascade='all')
592 591 # user pull requests
593 592 user_pull_requests = relationship('PullRequest', cascade='all')
594 593 # external identities
595 594 extenal_identities = relationship(
596 595 'ExternalIdentity',
597 596 primaryjoin="User.user_id==ExternalIdentity.local_user_id",
598 597 cascade='all')
599 598 # review rules
600 599 user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
601 600
602 601 def __unicode__(self):
603 602 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
604 603 self.user_id, self.username)
605 604
606 605 @hybrid_property
607 606 def email(self):
608 607 return self._email
609 608
610 609 @email.setter
611 610 def email(self, val):
612 611 self._email = val.lower() if val else None
613 612
614 613 @hybrid_property
615 614 def first_name(self):
616 615 from rhodecode.lib import helpers as h
617 616 if self.name:
618 617 return h.escape(self.name)
619 618 return self.name
620 619
621 620 @hybrid_property
622 621 def last_name(self):
623 622 from rhodecode.lib import helpers as h
624 623 if self.lastname:
625 624 return h.escape(self.lastname)
626 625 return self.lastname
627 626
628 627 @hybrid_property
629 628 def api_key(self):
630 629 """
631 630 Fetch an auth-token with role ALL connected to this user, if one exists
632 631 """
633 632 user_auth_token = UserApiKeys.query()\
634 633 .filter(UserApiKeys.user_id == self.user_id)\
635 634 .filter(or_(UserApiKeys.expires == -1,
636 635 UserApiKeys.expires >= time.time()))\
637 636 .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
638 637 if user_auth_token:
639 638 user_auth_token = user_auth_token.api_key
640 639
641 640 return user_auth_token
642 641
643 642 @api_key.setter
644 643 def api_key(self, val):
645 644 # don't allow setting the API key, this is deprecated for now
646 645 self._api_key = None
647 646
648 647 @property
649 648 def reviewer_pull_requests(self):
650 649 return PullRequestReviewers.query() \
651 650 .options(joinedload(PullRequestReviewers.pull_request)) \
652 651 .filter(PullRequestReviewers.user_id == self.user_id) \
653 652 .all()
654 653
655 654 @property
656 655 def firstname(self):
657 656 # alias for future
658 657 return self.name
659 658
660 659 @property
661 660 def emails(self):
662 661 other = UserEmailMap.query()\
663 662 .filter(UserEmailMap.user == self) \
664 663 .order_by(UserEmailMap.email_id.asc()) \
665 664 .all()
666 665 return [self.email] + [x.email for x in other]
667 666
668 667 @property
669 668 def auth_tokens(self):
670 669 auth_tokens = self.get_auth_tokens()
671 670 return [x.api_key for x in auth_tokens]
672 671
673 672 def get_auth_tokens(self):
674 673 return UserApiKeys.query()\
675 674 .filter(UserApiKeys.user == self)\
676 675 .order_by(UserApiKeys.user_api_key_id.asc())\
677 676 .all()
678 677
679 678 @LazyProperty
680 679 def feed_token(self):
681 680 return self.get_feed_token()
682 681
683 682 def get_feed_token(self, cache=True):
684 683 feed_tokens = UserApiKeys.query()\
685 684 .filter(UserApiKeys.user == self)\
686 685 .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
687 686 if cache:
688 687 feed_tokens = feed_tokens.options(
689 688 FromCache("long_term", "get_user_feed_token_%s" % self.user_id))
690 689
691 690 feed_tokens = feed_tokens.all()
692 691 if feed_tokens:
693 692 return feed_tokens[0].api_key
694 693 return 'NO_FEED_TOKEN_AVAILABLE'
695 694
696 695 @classmethod
697 696 def get(cls, user_id, cache=False):
698 697 if not user_id:
699 698 return
700 699
701 700 user = cls.query()
702 701 if cache:
703 702 user = user.options(
704 703 FromCache("sql_cache_short", "get_users_%s" % user_id))
705 704 return user.get(user_id)
706 705
707 706 @classmethod
708 707 def extra_valid_auth_tokens(cls, user, role=None):
709 708 tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
710 709 .filter(or_(UserApiKeys.expires == -1,
711 710 UserApiKeys.expires >= time.time()))
712 711 if role:
713 712 tokens = tokens.filter(or_(UserApiKeys.role == role,
714 713 UserApiKeys.role == UserApiKeys.ROLE_ALL))
715 714 return tokens.all()
716 715
717 716 def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
718 717 from rhodecode.lib import auth
719 718
720 719 log.debug('Trying to authenticate user: %s via auth-token, '
721 720 'and roles: %s', self, roles)
722 721
723 722 if not auth_token:
724 723 return False
725 724
726 725 crypto_backend = auth.crypto_backend()
727 726
728 727 roles = (roles or []) + [UserApiKeys.ROLE_ALL]
729 728 tokens_q = UserApiKeys.query()\
730 729 .filter(UserApiKeys.user_id == self.user_id)\
731 730 .filter(or_(UserApiKeys.expires == -1,
732 731 UserApiKeys.expires >= time.time()))
733 732
734 733 tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))
735 734
736 735 plain_tokens = []
737 736 hash_tokens = []
738 737
739 738 for token in tokens_q.all():
740 739 # verify scope first
741 740 if token.repo_id:
742 741 # token has a scope, we need to verify it
743 742 if scope_repo_id != token.repo_id:
744 743 log.debug(
745 744 'Scope mismatch: token has a set repo scope: %s, '
746 745 'and calling scope is:%s, skipping further checks',
747 746 token.repo, scope_repo_id)
748 747 # token has a scope, and it doesn't match, skip token
749 748 continue
750 749
751 750 if token.api_key.startswith(crypto_backend.ENC_PREF):
752 751 hash_tokens.append(token.api_key)
753 752 else:
754 753 plain_tokens.append(token.api_key)
755 754
756 755 is_plain_match = auth_token in plain_tokens
757 756 if is_plain_match:
758 757 return True
759 758
760 759 for hashed in hash_tokens:
761 760 # TODO(marcink): this is expensive to calculate, but most secure
762 761 match = crypto_backend.hash_check(auth_token, hashed)
763 762 if match:
764 763 return True
765 764
766 765 return False
767 766
768 767 @property
769 768 def ip_addresses(self):
770 769 ret = UserIpMap.query().filter(UserIpMap.user == self).all()
771 770 return [x.ip_addr for x in ret]
772 771
773 772 @property
774 773 def username_and_name(self):
775 774 return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
776 775
777 776 @property
778 777 def username_or_name_or_email(self):
779 778 full_name = self.full_name if self.full_name != ' ' else None
780 779 return self.username or full_name or self.email
781 780
782 781 @property
783 782 def full_name(self):
784 783 return '%s %s' % (self.first_name, self.last_name)
785 784
786 785 @property
787 786 def full_name_or_username(self):
788 787 return ('%s %s' % (self.first_name, self.last_name)
789 788 if (self.first_name and self.last_name) else self.username)
790 789
791 790 @property
792 791 def full_contact(self):
793 792 return '%s %s <%s>' % (self.first_name, self.last_name, self.email)
794 793
795 794 @property
796 795 def short_contact(self):
797 796 return '%s %s' % (self.first_name, self.last_name)
798 797
799 798 @property
800 799 def is_admin(self):
801 800 return self.admin
802 801
803 802 def AuthUser(self, **kwargs):
804 803 """
805 804 Returns instance of AuthUser for this user
806 805 """
807 806 from rhodecode.lib.auth import AuthUser
808 807 return AuthUser(user_id=self.user_id, username=self.username, **kwargs)
809 808
810 809 @hybrid_property
811 810 def user_data(self):
812 811 if not self._user_data:
813 812 return {}
814 813
815 814 try:
816 815 return json.loads(self._user_data)
817 816 except TypeError:
818 817 return {}
819 818
820 819 @user_data.setter
821 820 def user_data(self, val):
822 821 if not isinstance(val, dict):
823 822 raise Exception('user_data must be dict, got %s' % type(val))
824 823 try:
825 824 self._user_data = json.dumps(val)
826 825 except Exception:
827 826 log.error(traceback.format_exc())
828 827
829 828 @classmethod
830 829 def get_by_username(cls, username, case_insensitive=False,
831 830 cache=False, identity_cache=False):
832 831 session = Session()
833 832
834 833 if case_insensitive:
835 834 q = cls.query().filter(
836 835 func.lower(cls.username) == func.lower(username))
837 836 else:
838 837 q = cls.query().filter(cls.username == username)
839 838
840 839 if cache:
841 840 if identity_cache:
842 841 val = cls.identity_cache(session, 'username', username)
843 842 if val:
844 843 return val
845 844 else:
846 845 cache_key = "get_user_by_name_%s" % _hash_key(username)
847 846 q = q.options(
848 847 FromCache("sql_cache_short", cache_key))
849 848
850 849 return q.scalar()
851 850
852 851 @classmethod
853 852 def get_by_auth_token(cls, auth_token, cache=False):
854 853 q = UserApiKeys.query()\
855 854 .filter(UserApiKeys.api_key == auth_token)\
856 855 .filter(or_(UserApiKeys.expires == -1,
857 856 UserApiKeys.expires >= time.time()))
858 857 if cache:
859 858 q = q.options(
860 859 FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))
861 860
862 861 match = q.first()
863 862 if match:
864 863 return match.user
865 864
866 865 @classmethod
867 866 def get_by_email(cls, email, case_insensitive=False, cache=False):
868 867
869 868 if case_insensitive:
870 869 q = cls.query().filter(func.lower(cls.email) == func.lower(email))
871 870
872 871 else:
873 872 q = cls.query().filter(cls.email == email)
874 873
875 874 email_key = _hash_key(email)
876 875 if cache:
877 876 q = q.options(
878 877 FromCache("sql_cache_short", "get_email_key_%s" % email_key))
879 878
880 879 ret = q.scalar()
881 880 if ret is None:
882 881 q = UserEmailMap.query()
883 882 # try fetching in alternate email map
884 883 if case_insensitive:
885 884 q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
886 885 else:
887 886 q = q.filter(UserEmailMap.email == email)
888 887 q = q.options(joinedload(UserEmailMap.user))
889 888 if cache:
890 889 q = q.options(
891 890 FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
892 891 ret = getattr(q.scalar(), 'user', None)
893 892
894 893 return ret
895 894
896 895 @classmethod
897 896 def get_from_cs_author(cls, author):
898 897 """
899 898 Tries to get User objects out of commit author string
900 899
901 900 :param author:
902 901 """
903 902 from rhodecode.lib.helpers import email, author_name
904 903 # Valid email in the attribute passed, see if they're in the system
905 904 _email = email(author)
906 905 if _email:
907 906 user = cls.get_by_email(_email, case_insensitive=True)
908 907 if user:
909 908 return user
910 909 # Maybe we can match by username?
911 910 _author = author_name(author)
912 911 user = cls.get_by_username(_author, case_insensitive=True)
913 912 if user:
914 913 return user
915 914
916 915 def update_userdata(self, **kwargs):
917 916 usr = self
918 917 old = usr.user_data
919 918 old.update(**kwargs)
920 919 usr.user_data = old
921 920 Session().add(usr)
922 921 log.debug('updated userdata with %s', kwargs)
923 922
924 923 def update_lastlogin(self):
925 924 """Update user lastlogin"""
926 925 self.last_login = datetime.datetime.now()
927 926 Session().add(self)
928 927 log.debug('updated user %s lastlogin', self.username)
929 928
930 929 def update_lastactivity(self):
931 930 """Update user lastactivity"""
932 931 self.last_activity = datetime.datetime.now()
933 932 Session().add(self)
934 933 log.debug('updated user `%s` last activity', self.username)
935 934
936 935 def update_password(self, new_password):
937 936 from rhodecode.lib.auth import get_crypt_password
938 937
939 938 self.password = get_crypt_password(new_password)
940 939 Session().add(self)
941 940
942 941 @classmethod
943 942 def get_first_super_admin(cls):
944 943 user = User.query().filter(User.admin == true()).first()
945 944 if user is None:
946 945 raise Exception('FATAL: Missing administrative account!')
947 946 return user
948 947
949 948 @classmethod
950 949 def get_all_super_admins(cls):
951 950 """
952 951 Returns all admin accounts sorted by username
953 952 """
954 953 return User.query().filter(User.admin == true())\
955 954 .order_by(User.username.asc()).all()
956 955
957 956 @classmethod
958 957 def get_default_user(cls, cache=False, refresh=False):
959 958 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
960 959 if user is None:
961 960 raise Exception('FATAL: Missing default account!')
962 961 if refresh:
963 962 # The default user might be based on outdated state which
964 963 # has been loaded from the cache.
965 964 # A call to refresh() ensures that the
966 965 # latest state from the database is used.
967 966 Session().refresh(user)
968 967 return user
969 968
970 969 def _get_default_perms(self, user, suffix=''):
971 970 from rhodecode.model.permission import PermissionModel
972 971 return PermissionModel().get_default_perms(user.user_perms, suffix)
973 972
974 973 def get_default_perms(self, suffix=''):
975 974 return self._get_default_perms(self, suffix)
976 975
977 976 def get_api_data(self, include_secrets=False, details='full'):
978 977 """
979 978 Common function for generating user related data for API
980 979
981 980 :param include_secrets: By default secrets in the API data will be replaced
982 981 by a placeholder value to prevent exposing this data by accident. In case
983 982 this data shall be exposed, set this flag to ``True``.
984 983
985 984 :param details: details can be 'basic|full' basic gives only a subset of
986 985 the available user information that includes user_id, name and emails.
987 986 """
988 987 user = self
989 988 user_data = self.user_data
990 989 data = {
991 990 'user_id': user.user_id,
992 991 'username': user.username,
993 992 'firstname': user.name,
994 993 'lastname': user.lastname,
995 994 'email': user.email,
996 995 'emails': user.emails,
997 996 }
998 997 if details == 'basic':
999 998 return data
1000 999
1001 1000 auth_token_length = 40
1002 1001 auth_token_replacement = '*' * auth_token_length
1003 1002
1004 1003 extras = {
1005 1004 'auth_tokens': [auth_token_replacement],
1006 1005 'active': user.active,
1007 1006 'admin': user.admin,
1008 1007 'extern_type': user.extern_type,
1009 1008 'extern_name': user.extern_name,
1010 1009 'last_login': user.last_login,
1011 1010 'last_activity': user.last_activity,
1012 1011 'ip_addresses': user.ip_addresses,
1013 1012 'language': user_data.get('language')
1014 1013 }
1015 1014 data.update(extras)
1016 1015
1017 1016 if include_secrets:
1018 1017 data['auth_tokens'] = user.auth_tokens
1019 1018 return data
1020 1019
1021 1020 def __json__(self):
1022 1021 data = {
1023 1022 'full_name': self.full_name,
1024 1023 'full_name_or_username': self.full_name_or_username,
1025 1024 'short_contact': self.short_contact,
1026 1025 'full_contact': self.full_contact,
1027 1026 }
1028 1027 data.update(self.get_api_data())
1029 1028 return data
1030 1029
1031 1030
1032 1031 class UserApiKeys(Base, BaseModel):
1033 1032 __tablename__ = 'user_api_keys'
1034 1033 __table_args__ = (
1035 1034 Index('uak_api_key_idx', 'api_key', unique=True),
1036 1035 Index('uak_api_key_expires_idx', 'api_key', 'expires'),
1037 1036 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1038 1037 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1039 1038 )
1040 1039 __mapper_args__ = {}
1041 1040
1042 1041 # ApiKey role
1043 1042 ROLE_ALL = 'token_role_all'
1044 1043 ROLE_HTTP = 'token_role_http'
1045 1044 ROLE_VCS = 'token_role_vcs'
1046 1045 ROLE_API = 'token_role_api'
1047 1046 ROLE_FEED = 'token_role_feed'
1048 1047 ROLE_PASSWORD_RESET = 'token_password_reset'
1049 1048
1050 1049 ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]
1051 1050
1052 1051 user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1053 1052 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1054 1053 api_key = Column("api_key", String(255), nullable=False, unique=True)
1055 1054 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1056 1055 expires = Column('expires', Float(53), nullable=False)
1057 1056 role = Column('role', String(255), nullable=True)
1058 1057 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1059 1058
1060 1059 # scope columns
1061 1060 repo_id = Column(
1062 1061 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
1063 1062 nullable=True, unique=None, default=None)
1064 1063 repo = relationship('Repository', lazy='joined')
1065 1064
1066 1065 repo_group_id = Column(
1067 1066 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
1068 1067 nullable=True, unique=None, default=None)
1069 1068 repo_group = relationship('RepoGroup', lazy='joined')
1070 1069
1071 1070 user = relationship('User', lazy='joined')
1072 1071
1073 1072 def __unicode__(self):
1074 1073 return u"<%s('%s')>" % (self.__class__.__name__, self.role)
1075 1074
1076 1075 def __json__(self):
1077 1076 data = {
1078 1077 'auth_token': self.api_key,
1079 1078 'role': self.role,
1080 1079 'scope': self.scope_humanized,
1081 1080 'expired': self.expired
1082 1081 }
1083 1082 return data
1084 1083
1085 1084 def get_api_data(self, include_secrets=False):
1086 1085 data = self.__json__()
1087 1086 if include_secrets:
1088 1087 return data
1089 1088 else:
1090 1089 data['auth_token'] = self.token_obfuscated
1091 1090 return data
1092 1091
1093 1092 @hybrid_property
1094 1093 def description_safe(self):
1095 1094 from rhodecode.lib import helpers as h
1096 1095 return h.escape(self.description)
1097 1096
1098 1097 @property
1099 1098 def expired(self):
1100 1099 if self.expires == -1:
1101 1100 return False
1102 1101 return time.time() > self.expires
1103 1102
1104 1103 @classmethod
1105 1104 def _get_role_name(cls, role):
1106 1105 return {
1107 1106 cls.ROLE_ALL: _('all'),
1108 1107 cls.ROLE_HTTP: _('http/web interface'),
1109 1108 cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
1110 1109 cls.ROLE_API: _('api calls'),
1111 1110 cls.ROLE_FEED: _('feed access'),
1112 1111 }.get(role, role)
1113 1112
1114 1113 @property
1115 1114 def role_humanized(self):
1116 1115 return self._get_role_name(self.role)
1117 1116
1118 1117 def _get_scope(self):
1119 1118 if self.repo:
1120 1119 return repr(self.repo)
1121 1120 if self.repo_group:
1122 1121 return repr(self.repo_group) + ' (recursive)'
1123 1122 return 'global'
1124 1123
1125 1124 @property
1126 1125 def scope_humanized(self):
1127 1126 return self._get_scope()
1128 1127
1129 1128 @property
1130 1129 def token_obfuscated(self):
1131 1130 if self.api_key:
1132 1131 return self.api_key[:4] + "****"
1133 1132
1134 1133
1135 1134 class UserEmailMap(Base, BaseModel):
1136 1135 __tablename__ = 'user_email_map'
1137 1136 __table_args__ = (
1138 1137 Index('uem_email_idx', 'email'),
1139 1138 UniqueConstraint('email'),
1140 1139 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1141 1140 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1142 1141 )
1143 1142 __mapper_args__ = {}
1144 1143
1145 1144 email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1146 1145 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1147 1146 _email = Column("email", String(255), nullable=True, unique=False, default=None)
1148 1147 user = relationship('User', lazy='joined')
1149 1148
1150 1149 @validates('_email')
1151 1150 def validate_email(self, key, email):
1152 1151 # check if this email is not main one
1153 1152 main_email = Session().query(User).filter(User.email == email).scalar()
1154 1153 if main_email is not None:
1155 1154 raise AttributeError('email %s is present in user table' % email)
1156 1155 return email
1157 1156
1158 1157 @hybrid_property
1159 1158 def email(self):
1160 1159 return self._email
1161 1160
1162 1161 @email.setter
1163 1162 def email(self, val):
1164 1163 self._email = val.lower() if val else None
1165 1164
1166 1165
1167 1166 class UserIpMap(Base, BaseModel):
1168 1167 __tablename__ = 'user_ip_map'
1169 1168 __table_args__ = (
1170 1169 UniqueConstraint('user_id', 'ip_addr'),
1171 1170 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1172 1171 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1173 1172 )
1174 1173 __mapper_args__ = {}
1175 1174
1176 1175 ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1177 1176 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1178 1177 ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
1179 1178 active = Column("active", Boolean(), nullable=True, unique=None, default=True)
1180 1179 description = Column("description", String(10000), nullable=True, unique=None, default=None)
1181 1180 user = relationship('User', lazy='joined')
1182 1181
1183 1182 @hybrid_property
1184 1183 def description_safe(self):
1185 1184 from rhodecode.lib import helpers as h
1186 1185 return h.escape(self.description)
1187 1186
1188 1187 @classmethod
1189 1188 def _get_ip_range(cls, ip_addr):
1190 1189 net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
1191 1190 return [str(net.network_address), str(net.broadcast_address)]
1192 1191
1193 1192 def __json__(self):
1194 1193 return {
1195 1194 'ip_addr': self.ip_addr,
1196 1195 'ip_range': self._get_ip_range(self.ip_addr),
1197 1196 }
1198 1197
1199 1198 def __unicode__(self):
1200 1199 return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
1201 1200 self.user_id, self.ip_addr)
1202 1201
1203 1202
1204 1203 class UserSshKeys(Base, BaseModel):
1205 1204 __tablename__ = 'user_ssh_keys'
1206 1205 __table_args__ = (
1207 1206 Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),
1208 1207
1209 1208 UniqueConstraint('ssh_key_fingerprint'),
1210 1209
1211 1210 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1212 1211 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
1213 1212 )
1214 1213 __mapper_args__ = {}
1215 1214
1216 1215 ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
1217 1216 ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
1218 1217 ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)
1219 1218
1220 1219 description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
1221 1220
1222 1221 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1223 1222 accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
1224 1223 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
1225 1224
1226 1225 user = relationship('User', lazy='joined')
1227 1226
1228 1227 def __json__(self):
1229 1228 data = {
1230 1229 'ssh_fingerprint': self.ssh_key_fingerprint,
1231 1230 'description': self.description,
1232 1231 'created_on': self.created_on
1233 1232 }
1234 1233 return data
1235 1234
1236 1235 def get_api_data(self):
1237 1236 data = self.__json__()
1238 1237 return data
1239 1238
1240 1239
1241 1240 class UserLog(Base, BaseModel):
1242 1241 __tablename__ = 'user_logs'
1243 1242 __table_args__ = (
1244 1243 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1245 1244 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1246 1245 )
1247 1246 VERSION_1 = 'v1'
1248 1247 VERSION_2 = 'v2'
1249 1248 VERSIONS = [VERSION_1, VERSION_2]
1250 1249
1251 1250 user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1252 1251 user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
1253 1252 username = Column("username", String(255), nullable=True, unique=None, default=None)
1254 1253 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
1255 1254 repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
1256 1255 user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
1257 1256 action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
1258 1257 action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)
1259 1258
1260 1259 version = Column("version", String(255), nullable=True, default=VERSION_1)
1261 1260 user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1262 1261 action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
1263 1262
1264 1263 def __unicode__(self):
1265 1264 return u"<%s('id:%s:%s')>" % (
1266 1265 self.__class__.__name__, self.repository_name, self.action)
1267 1266
1268 1267 def __json__(self):
1269 1268 return {
1270 1269 'user_id': self.user_id,
1271 1270 'username': self.username,
1272 1271 'repository_id': self.repository_id,
1273 1272 'repository_name': self.repository_name,
1274 1273 'user_ip': self.user_ip,
1275 1274 'action_date': self.action_date,
1276 1275 'action': self.action,
1277 1276 }
1278 1277
1279 1278 @hybrid_property
1280 1279 def entry_id(self):
1281 1280 return self.user_log_id
1282 1281
1283 1282 @property
1284 1283 def action_as_day(self):
1285 1284 return datetime.date(*self.action_date.timetuple()[:3])
1286 1285
1287 1286 user = relationship('User')
1288 1287 repository = relationship('Repository', cascade='')
1289 1288
1290 1289
1291 1290 class UserGroup(Base, BaseModel):
1292 1291 __tablename__ = 'users_groups'
1293 1292 __table_args__ = (
1294 1293 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1295 1294 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1296 1295 )
1297 1296
1298 1297 users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1299 1298 users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
1300 1299 user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
1301 1300 users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
1302 1301 inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
1303 1302 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
1304 1303 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1305 1304 _group_data = Column("group_data", LargeBinary(), nullable=True) # JSON data
1306 1305
1307 1306 members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
1308 1307 users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
1309 1308 users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1310 1309 users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
1311 1310 user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
1312 1311 user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')
1313 1312
1314 1313 user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
1315 1314 user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")
1316 1315
1317 1316 @classmethod
1318 1317 def _load_group_data(cls, column):
1319 1318 if not column:
1320 1319 return {}
1321 1320
1322 1321 try:
1323 1322 return json.loads(column) or {}
1324 1323 except TypeError:
1325 1324 return {}
1326 1325
1327 1326 @hybrid_property
1328 1327 def description_safe(self):
1329 1328 from rhodecode.lib import helpers as h
1330 return h.escape(self.description)
1329 return h.escape(self.user_group_description)
1331 1330
1332 1331 @hybrid_property
1333 1332 def group_data(self):
1334 1333 return self._load_group_data(self._group_data)
1335 1334
1336 1335 @group_data.expression
1337 1336 def group_data(self, **kwargs):
1338 1337 return self._group_data
1339 1338
1340 1339 @group_data.setter
1341 1340 def group_data(self, val):
1342 1341 try:
1343 1342 self._group_data = json.dumps(val)
1344 1343 except Exception:
1345 1344 log.error(traceback.format_exc())
1346 1345
1347 1346 def __unicode__(self):
1348 1347 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
1349 1348 self.users_group_id,
1350 1349 self.users_group_name)
1351 1350
1352 1351 @classmethod
1353 1352 def get_by_group_name(cls, group_name, cache=False,
1354 1353 case_insensitive=False):
1355 1354 if case_insensitive:
1356 1355 q = cls.query().filter(func.lower(cls.users_group_name) ==
1357 1356 func.lower(group_name))
1358 1357
1359 1358 else:
1360 1359 q = cls.query().filter(cls.users_group_name == group_name)
1361 1360 if cache:
1362 1361 q = q.options(
1363 1362 FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
1364 1363 return q.scalar()
1365 1364
1366 1365 @classmethod
1367 1366 def get(cls, user_group_id, cache=False):
1368 1367 if not user_group_id:
1369 1368 return
1370 1369
1371 1370 user_group = cls.query()
1372 1371 if cache:
1373 1372 user_group = user_group.options(
1374 1373 FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
1375 1374 return user_group.get(user_group_id)
1376 1375
1377 1376 def permissions(self, with_admins=True, with_owner=True):
1378 1377 q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
1379 1378 q = q.options(joinedload(UserUserGroupToPerm.user_group),
1380 1379 joinedload(UserUserGroupToPerm.user),
1381 1380 joinedload(UserUserGroupToPerm.permission),)
1382 1381
1383 1382 # get owners, admins and their permissions. We rewrite the SQLAlchemy
1384 1383 # objects into named tuples because the SQLAlchemy session keeps a global
1385 1384 # reference, so changing one object would propagate to all others.
1386 1385 # Without this, if an admin is also the owner, a change to admin_row
1387 1386 # would propagate to both objects.
1388 1387 perm_rows = []
1389 1388 for _usr in q.all():
1390 1389 usr = AttributeDict(_usr.user.get_dict())
1391 1390 usr.permission = _usr.permission.permission_name
1392 1391 perm_rows.append(usr)
1393 1392
1394 1393 # filter the perm rows by 'default' first and then sort them by
1395 1394 # admin,write,read,none permissions sorted again alphabetically in
1396 1395 # each group
1397 1396 perm_rows = sorted(perm_rows, key=display_user_sort)
1398 1397
1399 1398 _admin_perm = 'usergroup.admin'
1400 1399 owner_row = []
1401 1400 if with_owner:
1402 1401 usr = AttributeDict(self.user.get_dict())
1403 1402 usr.owner_row = True
1404 1403 usr.permission = _admin_perm
1405 1404 owner_row.append(usr)
1406 1405
1407 1406 super_admin_rows = []
1408 1407 if with_admins:
1409 1408 for usr in User.get_all_super_admins():
1410 1409 # if this admin is also owner, don't double the record
1411 1410 if usr.user_id == owner_row[0].user_id:
1412 1411 owner_row[0].admin_row = True
1413 1412 else:
1414 1413 usr = AttributeDict(usr.get_dict())
1415 1414 usr.admin_row = True
1416 1415 usr.permission = _admin_perm
1417 1416 super_admin_rows.append(usr)
1418 1417
1419 1418 return super_admin_rows + owner_row + perm_rows
1420 1419
1421 1420 def permission_user_groups(self):
1422 1421 q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
1423 1422 q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
1424 1423 joinedload(UserGroupUserGroupToPerm.target_user_group),
1425 1424 joinedload(UserGroupUserGroupToPerm.permission),)
1426 1425
1427 1426 perm_rows = []
1428 1427 for _user_group in q.all():
1429 1428 usr = AttributeDict(_user_group.user_group.get_dict())
1430 1429 usr.permission = _user_group.permission.permission_name
1431 1430 perm_rows.append(usr)
1432 1431
1433 1432 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1434 1433 return perm_rows
1435 1434
1436 1435 def _get_default_perms(self, user_group, suffix=''):
1437 1436 from rhodecode.model.permission import PermissionModel
1438 1437 return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)
1439 1438
1440 1439 def get_default_perms(self, suffix=''):
1441 1440 return self._get_default_perms(self, suffix)
1442 1441
1443 1442 def get_api_data(self, with_group_members=True, include_secrets=False):
1444 1443 """
1445 1444 :param include_secrets: See :meth:`User.get_api_data`, this parameter is
1446 1445 basically forwarded.
1447 1446
1448 1447 """
1449 1448 user_group = self
1450 1449 data = {
1451 1450 'users_group_id': user_group.users_group_id,
1452 1451 'group_name': user_group.users_group_name,
1453 1452 'group_description': user_group.user_group_description,
1454 1453 'active': user_group.users_group_active,
1455 1454 'owner': user_group.user.username,
1456 1455 'owner_email': user_group.user.email,
1457 1456 }
1458 1457
1459 1458 if with_group_members:
1460 1459 users = []
1461 1460 for user in user_group.members:
1462 1461 user = user.user
1463 1462 users.append(user.get_api_data(include_secrets=include_secrets))
1464 1463 data['users'] = users
1465 1464
1466 1465 return data
1467 1466
1468 1467
1469 1468 class UserGroupMember(Base, BaseModel):
1470 1469 __tablename__ = 'users_groups_members'
1471 1470 __table_args__ = (
1472 1471 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1473 1472 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1474 1473 )
1475 1474
1476 1475 users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1477 1476 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
1478 1477 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
1479 1478
1480 1479 user = relationship('User', lazy='joined')
1481 1480 users_group = relationship('UserGroup')
1482 1481
1483 1482 def __init__(self, gr_id='', u_id=''):
1484 1483 self.users_group_id = gr_id
1485 1484 self.user_id = u_id
1486 1485
1487 1486
1488 1487 class RepositoryField(Base, BaseModel):
1489 1488 __tablename__ = 'repositories_fields'
1490 1489 __table_args__ = (
1491 1490 UniqueConstraint('repository_id', 'field_key'), # no-multi field
1492 1491 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1493 1492 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1494 1493 )
1495 1494 PREFIX = 'ex_' # prefix used in form to not conflict with already existing fields
1496 1495
1497 1496 repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
1498 1497 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
1499 1498 field_key = Column("field_key", String(250))
1500 1499 field_label = Column("field_label", String(1024), nullable=False)
1501 1500 field_value = Column("field_value", String(10000), nullable=False)
1502 1501 field_desc = Column("field_desc", String(1024), nullable=False)
1503 1502 field_type = Column("field_type", String(255), nullable=False, unique=None)
1504 1503 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
1505 1504
1506 1505 repository = relationship('Repository')
1507 1506
1508 1507 @property
1509 1508 def field_key_prefixed(self):
1510 1509 return 'ex_%s' % self.field_key
1511 1510
1512 1511 @classmethod
1513 1512 def un_prefix_key(cls, key):
1514 1513 if key.startswith(cls.PREFIX):
1515 1514 return key[len(cls.PREFIX):]
1516 1515 return key
1517 1516
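# Illustrative sketch (assumption, not part of the original code): how the
# `ex_` prefix helpers relate; `field` is a hypothetical RepositoryField row.
#
#   field.field_key = 'ticket_url'
#   field.field_key_prefixed                         # -> 'ex_ticket_url'
#   RepositoryField.un_prefix_key('ex_ticket_url')   # -> 'ticket_url'
#   RepositoryField.un_prefix_key('ticket_url')      # -> 'ticket_url' (no prefix, unchanged)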
1518 1517 @classmethod
1519 1518 def get_by_key_name(cls, key, repo):
1520 1519 row = cls.query()\
1521 1520 .filter(cls.repository == repo)\
1522 1521 .filter(cls.field_key == key).scalar()
1523 1522 return row
1524 1523
1525 1524
1526 1525 class Repository(Base, BaseModel):
1527 1526 __tablename__ = 'repositories'
1528 1527 __table_args__ = (
1529 1528 Index('r_repo_name_idx', 'repo_name', mysql_length=255),
1530 1529 {'extend_existing': True, 'mysql_engine': 'InnoDB',
1531 1530 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
1532 1531 )
1533 1532 DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
1534 1533 DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
1535 1534
1536 1535 STATE_CREATED = 'repo_state_created'
1537 1536 STATE_PENDING = 'repo_state_pending'
1538 1537 STATE_ERROR = 'repo_state_error'
1539 1538
1540 1539 LOCK_AUTOMATIC = 'lock_auto'
1541 1540 LOCK_API = 'lock_api'
1542 1541 LOCK_WEB = 'lock_web'
1543 1542 LOCK_PULL = 'lock_pull'
1544 1543
1545 1544 NAME_SEP = URL_SEP
1546 1545
1547 1546 repo_id = Column(
1548 1547 "repo_id", Integer(), nullable=False, unique=True, default=None,
1549 1548 primary_key=True)
1550 1549 _repo_name = Column(
1551 1550 "repo_name", Text(), nullable=False, default=None)
1552 1551 _repo_name_hash = Column(
1553 1552 "repo_name_hash", String(255), nullable=False, unique=True)
1554 1553 repo_state = Column("repo_state", String(255), nullable=True)
1555 1554
1556 1555 clone_uri = Column(
1557 1556 "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
1558 1557 default=None)
1559 1558 repo_type = Column(
1560 1559 "repo_type", String(255), nullable=False, unique=False, default=None)
1561 1560 user_id = Column(
1562 1561 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
1563 1562 unique=False, default=None)
1564 1563 private = Column(
1565 1564 "private", Boolean(), nullable=True, unique=None, default=None)
1566 1565 enable_statistics = Column(
1567 1566 "statistics", Boolean(), nullable=True, unique=None, default=True)
1568 1567 enable_downloads = Column(
1569 1568 "downloads", Boolean(), nullable=True, unique=None, default=True)
1570 1569 description = Column(
1571 1570 "description", String(10000), nullable=True, unique=None, default=None)
1572 1571 created_on = Column(
1573 1572 'created_on', DateTime(timezone=False), nullable=True, unique=None,
1574 1573 default=datetime.datetime.now)
1575 1574 updated_on = Column(
1576 1575 'updated_on', DateTime(timezone=False), nullable=True, unique=None,
1577 1576 default=datetime.datetime.now)
1578 1577 _landing_revision = Column(
1579 1578 "landing_revision", String(255), nullable=False, unique=False,
1580 1579 default=None)
1581 1580 enable_locking = Column(
1582 1581 "enable_locking", Boolean(), nullable=False, unique=None,
1583 1582 default=False)
1584 1583 _locked = Column(
1585 1584 "locked", String(255), nullable=True, unique=False, default=None)
1586 1585 _changeset_cache = Column(
1587 1586 "changeset_cache", LargeBinary(), nullable=True) # JSON data
1588 1587
1589 1588 fork_id = Column(
1590 1589 "fork_id", Integer(), ForeignKey('repositories.repo_id'),
1591 1590 nullable=True, unique=False, default=None)
1592 1591 group_id = Column(
1593 1592 "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
1594 1593 unique=False, default=None)
1595 1594
1596 1595 user = relationship('User', lazy='joined')
1597 1596 fork = relationship('Repository', remote_side=repo_id, lazy='joined')
1598 1597 group = relationship('RepoGroup', lazy='joined')
1599 1598 repo_to_perm = relationship(
1600 1599 'UserRepoToPerm', cascade='all',
1601 1600 order_by='UserRepoToPerm.repo_to_perm_id')
1602 1601 users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
1603 1602 stats = relationship('Statistics', cascade='all', uselist=False)
1604 1603
1605 1604 followers = relationship(
1606 1605 'UserFollowing',
1607 1606 primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
1608 1607 cascade='all')
1609 1608 extra_fields = relationship(
1610 1609 'RepositoryField', cascade="all, delete, delete-orphan")
1611 1610 logs = relationship('UserLog')
1612 1611 comments = relationship(
1613 1612 'ChangesetComment', cascade="all, delete, delete-orphan")
1614 1613 pull_requests_source = relationship(
1615 1614 'PullRequest',
1616 1615 primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
1617 1616 cascade="all, delete, delete-orphan")
1618 1617 pull_requests_target = relationship(
1619 1618 'PullRequest',
1620 1619 primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
1621 1620 cascade="all, delete, delete-orphan")
1622 1621 ui = relationship('RepoRhodeCodeUi', cascade="all")
1623 1622 settings = relationship('RepoRhodeCodeSetting', cascade="all")
1624 1623 integrations = relationship('Integration',
1625 1624 cascade="all, delete, delete-orphan")
1626 1625
1627 1626 scoped_tokens = relationship('UserApiKeys', cascade="all")
1628 1627
1629 1628 def __unicode__(self):
1630 1629 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1631 1630 safe_unicode(self.repo_name))
1632 1631
1633 1632 @hybrid_property
1634 1633 def description_safe(self):
1635 1634 from rhodecode.lib import helpers as h
1636 1635 return h.escape(self.description)
1637 1636
1638 1637 @hybrid_property
1639 1638 def landing_rev(self):
1640 1639 # always should return [rev_type, rev]
1641 1640 if self._landing_revision:
1642 1641 _rev_info = self._landing_revision.split(':')
1643 1642 if len(_rev_info) < 2:
1644 1643 _rev_info.insert(0, 'rev')
1645 1644 return [_rev_info[0], _rev_info[1]]
1646 1645 return [None, None]
1647 1646
1648 1647 @landing_rev.setter
1649 1648 def landing_rev(self, val):
1650 1649 if ':' not in val:
1651 1650 raise ValueError('value must be delimited with `:` and consist '
1652 1651 'of <rev_type>:<rev>, got %s instead' % val)
1653 1652 self._landing_revision = val
1654 1653
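# Illustrative sketch (assumption, not part of the original code) of the
# landing_rev round-trip; `repo` is a hypothetical Repository instance.
#
#   repo.landing_rev = 'branch:default'   # stored as 'branch:default'
#   repo.landing_rev                      # -> ['branch', 'default']
#   repo.landing_rev = 'default'          # raises ValueError: missing ':' delimiter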
1655 1654 @hybrid_property
1656 1655 def locked(self):
1657 1656 if self._locked:
1658 1657 user_id, timelocked, reason = self._locked.split(':')
1659 1658 lock_values = int(user_id), timelocked, reason
1660 1659 else:
1661 1660 lock_values = [None, None, None]
1662 1661 return lock_values
1663 1662
1664 1663 @locked.setter
1665 1664 def locked(self, val):
1666 1665 if val and isinstance(val, (list, tuple)):
1667 1666 self._locked = ':'.join(map(str, val))
1668 1667 else:
1669 1668 self._locked = None
1670 1669
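# Illustrative sketch (assumption, not part of the original code): the lock
# value round-trips through a colon-delimited 'user_id:time:reason' string.
#
#   repo.locked = [2, time.time(), Repository.LOCK_API]
#   user_id, lock_time, reason = repo.locked   # -> (2, '1510000000.0', 'lock_api')
#   repo.locked = None                         # releases the lock -> [None, None, None]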
1671 1670 @hybrid_property
1672 1671 def changeset_cache(self):
1673 1672 from rhodecode.lib.vcs.backends.base import EmptyCommit
1674 1673 dummy = EmptyCommit().__json__()
1675 1674 if not self._changeset_cache:
1676 1675 return dummy
1677 1676 try:
1678 1677 return json.loads(self._changeset_cache)
1679 1678 except TypeError:
1680 1679 return dummy
1681 1680 except Exception:
1682 1681 log.error(traceback.format_exc())
1683 1682 return dummy
1684 1683
1685 1684 @changeset_cache.setter
1686 1685 def changeset_cache(self, val):
1687 1686 try:
1688 1687 self._changeset_cache = json.dumps(val)
1689 1688 except Exception:
1690 1689 log.error(traceback.format_exc())
1691 1690
1692 1691 @hybrid_property
1693 1692 def repo_name(self):
1694 1693 return self._repo_name
1695 1694
1696 1695 @repo_name.setter
1697 1696 def repo_name(self, value):
1698 1697 self._repo_name = value
1699 1698 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1700 1699
1701 1700 @classmethod
1702 1701 def normalize_repo_name(cls, repo_name):
1703 1702 """
1704 1703 Normalizes an OS-specific repo_name to the format stored internally in the
1705 1704 database, using URL_SEP as the separator
1706 1705
1707 1706 :param cls:
1708 1707 :param repo_name:
1709 1708 """
1710 1709 return cls.NAME_SEP.join(repo_name.split(os.sep))
1711 1710
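# Illustrative sketch (assumption): on a platform where os.sep is '\\', the
# OS-specific path is normalized to the URL_SEP ('/') form stored in the DB.
#
#   Repository.normalize_repo_name('group\\sub\\repo')   # -> 'group/sub/repo'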
1712 1711 @classmethod
1713 1712 def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
1714 1713 session = Session()
1715 1714 q = session.query(cls).filter(cls.repo_name == repo_name)
1716 1715
1717 1716 if cache:
1718 1717 if identity_cache:
1719 1718 val = cls.identity_cache(session, 'repo_name', repo_name)
1720 1719 if val:
1721 1720 return val
1722 1721 else:
1723 1722 cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
1724 1723 q = q.options(
1725 1724 FromCache("sql_cache_short", cache_key))
1726 1725
1727 1726 return q.scalar()
1728 1727
1729 1728 @classmethod
1730 1729 def get_by_id_or_repo_name(cls, repoid):
1731 1730 if isinstance(repoid, (int, long)):
1732 1731 try:
1733 1732 repo = cls.get(repoid)
1734 1733 except ValueError:
1735 1734 repo = None
1736 1735 else:
1737 1736 repo = cls.get_by_repo_name(repoid)
1738 1737 return repo
1739 1738
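# Illustrative sketch (assumption, not part of the original code): the helper
# accepts either a numeric id or a repository name.
#
#   Repository.get_by_id_or_repo_name(42)             # lookup by primary key
#   Repository.get_by_id_or_repo_name('group/repo')   # lookup by repo_name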
1740 1739 @classmethod
1741 1740 def get_by_full_path(cls, repo_full_path):
1742 1741 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1743 1742 repo_name = cls.normalize_repo_name(repo_name)
1744 1743 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1745 1744
1746 1745 @classmethod
1747 1746 def get_repo_forks(cls, repo_id):
1748 1747 return cls.query().filter(Repository.fork_id == repo_id)
1749 1748
1750 1749 @classmethod
1751 1750 def base_path(cls):
1752 1751 """
1753 1752 Returns the base path where all repos are stored
1754 1753
1755 1754 :param cls:
1756 1755 """
1757 1756 q = Session().query(RhodeCodeUi)\
1758 1757 .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
1759 1758 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1760 1759 return q.one().ui_value
1761 1760
1762 1761 @classmethod
1763 1762 def is_valid(cls, repo_name):
1764 1763 """
1765 1764 returns True if the given repo name is a valid filesystem repository
1766 1765
1767 1766 :param cls:
1768 1767 :param repo_name:
1769 1768 """
1770 1769 from rhodecode.lib.utils import is_valid_repo
1771 1770
1772 1771 return is_valid_repo(repo_name, cls.base_path())
1773 1772
1774 1773 @classmethod
1775 1774 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1776 1775 case_insensitive=True):
1777 1776 q = Repository.query()
1778 1777
1779 1778 if not isinstance(user_id, Optional):
1780 1779 q = q.filter(Repository.user_id == user_id)
1781 1780
1782 1781 if not isinstance(group_id, Optional):
1783 1782 q = q.filter(Repository.group_id == group_id)
1784 1783
1785 1784 if case_insensitive:
1786 1785 q = q.order_by(func.lower(Repository.repo_name))
1787 1786 else:
1788 1787 q = q.order_by(Repository.repo_name)
1789 1788 return q.all()
1790 1789
1791 1790 @property
1792 1791 def forks(self):
1793 1792 """
1794 1793 Return forks of this repo
1795 1794 """
1796 1795 return Repository.get_repo_forks(self.repo_id)
1797 1796
1798 1797 @property
1799 1798 def parent(self):
1800 1799 """
1801 1800 Returns fork parent
1802 1801 """
1803 1802 return self.fork
1804 1803
1805 1804 @property
1806 1805 def just_name(self):
1807 1806 return self.repo_name.split(self.NAME_SEP)[-1]
1808 1807
1809 1808 @property
1810 1809 def groups_with_parents(self):
1811 1810 groups = []
1812 1811 if self.group is None:
1813 1812 return groups
1814 1813
1815 1814 cur_gr = self.group
1816 1815 groups.insert(0, cur_gr)
1817 1816 while 1:
1818 1817 gr = getattr(cur_gr, 'parent_group', None)
1819 1818 cur_gr = cur_gr.parent_group
1820 1819 if gr is None:
1821 1820 break
1822 1821 groups.insert(0, gr)
1823 1822
1824 1823 return groups
1825 1824
1826 1825 @property
1827 1826 def groups_and_repo(self):
1828 1827 return self.groups_with_parents, self
1829 1828
1830 1829 @LazyProperty
1831 1830 def repo_path(self):
1832 1831 """
1833 1832 Returns the full base path for this repository, meaning where it actually
1834 1833 exists on the filesystem
1835 1834 """
1836 1835 q = Session().query(RhodeCodeUi).filter(
1837 1836 RhodeCodeUi.ui_key == self.NAME_SEP)
1838 1837 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
1839 1838 return q.one().ui_value
1840 1839
1841 1840 @property
1842 1841 def repo_full_path(self):
1843 1842 p = [self.repo_path]
1844 1843 # we need to split the name by / since this is how we store the
1845 1844 # names in the database, but that eventually needs to be converted
1846 1845 # into a valid system path
1847 1846 p += self.repo_name.split(self.NAME_SEP)
1848 1847 return os.path.join(*map(safe_unicode, p))
1849 1848
1850 1849 @property
1851 1850 def cache_keys(self):
1852 1851 """
1853 1852 Returns associated cache keys for that repo
1854 1853 """
1855 1854 return CacheKey.query()\
1856 1855 .filter(CacheKey.cache_args == self.repo_name)\
1857 1856 .order_by(CacheKey.cache_key)\
1858 1857 .all()
1859 1858
1860 1859 def get_new_name(self, repo_name):
1861 1860 """
1862 1861 returns the new full repository name based on the assigned group and the new name
1863 1862
1864 1863 :param repo_name:
1865 1864 """
1866 1865 path_prefix = self.group.full_path_splitted if self.group else []
1867 1866 return self.NAME_SEP.join(path_prefix + [repo_name])
1868 1867
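# Illustrative sketch (assumption, hypothetical group and names): for a repo
# living in group 'docs/api', renaming keeps the group prefix.
#
#   repo.get_new_name('client')   # -> 'docs/api/client'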
1869 1868 @property
1870 1869 def _config(self):
1871 1870 """
1872 1871 Returns db based config object.
1873 1872 """
1874 1873 from rhodecode.lib.utils import make_db_config
1875 1874 return make_db_config(clear_session=False, repo=self)
1876 1875
1877 1876 def permissions(self, with_admins=True, with_owner=True):
1878 1877 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1879 1878 q = q.options(joinedload(UserRepoToPerm.repository),
1880 1879 joinedload(UserRepoToPerm.user),
1881 1880 joinedload(UserRepoToPerm.permission),)
1882 1881
1883 1882 # get owners, admins and their permissions. We rewrite the SQLAlchemy
1884 1883 # objects into named tuples because the SQLAlchemy session keeps a global
1885 1884 # reference, so changing one object would propagate to all others.
1886 1885 # Without this, if an admin is also the owner, a change to admin_row
1887 1886 # would propagate to both objects.
1888 1887 perm_rows = []
1889 1888 for _usr in q.all():
1890 1889 usr = AttributeDict(_usr.user.get_dict())
1891 1890 usr.permission = _usr.permission.permission_name
1892 1891 perm_rows.append(usr)
1893 1892
1894 1893 # filter the perm rows by 'default' first and then sort them by
1895 1894 # admin,write,read,none permissions sorted again alphabetically in
1896 1895 # each group
1897 1896 perm_rows = sorted(perm_rows, key=display_user_sort)
1898 1897
1899 1898 _admin_perm = 'repository.admin'
1900 1899 owner_row = []
1901 1900 if with_owner:
1902 1901 usr = AttributeDict(self.user.get_dict())
1903 1902 usr.owner_row = True
1904 1903 usr.permission = _admin_perm
1905 1904 owner_row.append(usr)
1906 1905
1907 1906 super_admin_rows = []
1908 1907 if with_admins:
1909 1908 for usr in User.get_all_super_admins():
1910 1909 # if this admin is also owner, don't double the record
1911 1910 if usr.user_id == owner_row[0].user_id:
1912 1911 owner_row[0].admin_row = True
1913 1912 else:
1914 1913 usr = AttributeDict(usr.get_dict())
1915 1914 usr.admin_row = True
1916 1915 usr.permission = _admin_perm
1917 1916 super_admin_rows.append(usr)
1918 1917
1919 1918 return super_admin_rows + owner_row + perm_rows
1920 1919
1921 1920 def permission_user_groups(self):
1922 1921 q = UserGroupRepoToPerm.query().filter(
1923 1922 UserGroupRepoToPerm.repository == self)
1924 1923 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1925 1924 joinedload(UserGroupRepoToPerm.users_group),
1926 1925 joinedload(UserGroupRepoToPerm.permission),)
1927 1926
1928 1927 perm_rows = []
1929 1928 for _user_group in q.all():
1930 1929 usr = AttributeDict(_user_group.users_group.get_dict())
1931 1930 usr.permission = _user_group.permission.permission_name
1932 1931 perm_rows.append(usr)
1933 1932
1934 1933 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1935 1934 return perm_rows
1936 1935
1937 1936 def get_api_data(self, include_secrets=False):
1938 1937 """
1939 1938 Common function for generating repo api data
1940 1939
1941 1940 :param include_secrets: See :meth:`User.get_api_data`.
1942 1941
1943 1942 """
1944 1943 # TODO: mikhail: there is an anti-pattern here; we probably need to
1945 1944 # move these methods to the model level.
1946 1945 from rhodecode.model.settings import SettingsModel
1947 1946 from rhodecode.model.repo import RepoModel
1948 1947
1949 1948 repo = self
1950 1949 _user_id, _time, _reason = self.locked
1951 1950
1952 1951 data = {
1953 1952 'repo_id': repo.repo_id,
1954 1953 'repo_name': repo.repo_name,
1955 1954 'repo_type': repo.repo_type,
1956 1955 'clone_uri': repo.clone_uri or '',
1957 1956 'url': RepoModel().get_url(self),
1958 1957 'private': repo.private,
1959 1958 'created_on': repo.created_on,
1960 1959 'description': repo.description_safe,
1961 1960 'landing_rev': repo.landing_rev,
1962 1961 'owner': repo.user.username,
1963 1962 'fork_of': repo.fork.repo_name if repo.fork else None,
1964 1963 'fork_of_id': repo.fork.repo_id if repo.fork else None,
1965 1964 'enable_statistics': repo.enable_statistics,
1966 1965 'enable_locking': repo.enable_locking,
1967 1966 'enable_downloads': repo.enable_downloads,
1968 1967 'last_changeset': repo.changeset_cache,
1969 1968 'locked_by': User.get(_user_id).get_api_data(
1970 1969 include_secrets=include_secrets) if _user_id else None,
1971 1970 'locked_date': time_to_datetime(_time) if _time else None,
1972 1971 'lock_reason': _reason if _reason else None,
1973 1972 }
1974 1973
1975 1974 # TODO: mikhail: should be per-repo settings here
1976 1975 rc_config = SettingsModel().get_all_settings()
1977 1976 repository_fields = str2bool(
1978 1977 rc_config.get('rhodecode_repository_fields'))
1979 1978 if repository_fields:
1980 1979 for f in self.extra_fields:
1981 1980 data[f.field_key_prefixed] = f.field_value
1982 1981
1983 1982 return data
1984 1983
1985 1984 @classmethod
1986 1985 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
1987 1986 if not lock_time:
1988 1987 lock_time = time.time()
1989 1988 if not lock_reason:
1990 1989 lock_reason = cls.LOCK_AUTOMATIC
1991 1990 repo.locked = [user_id, lock_time, lock_reason]
1992 1991 Session().add(repo)
1993 1992 Session().commit()
1994 1993
1995 1994 @classmethod
1996 1995 def unlock(cls, repo):
1997 1996 repo.locked = None
1998 1997 Session().add(repo)
1999 1998 Session().commit()
2000 1999
2001 2000 @classmethod
2002 2001 def getlock(cls, repo):
2003 2002 return repo.locked
2004 2003
2005 2004 def is_user_lock(self, user_id):
2006 2005 if self.lock[0]:
2007 2006 lock_user_id = safe_int(self.lock[0])
2008 2007 user_id = safe_int(user_id)
2009 2008 # both are ints, and they are equal
2010 2009 return all([lock_user_id, user_id]) and lock_user_id == user_id
2011 2010
2012 2011 return False
2013 2012
2014 2013 def get_locking_state(self, action, user_id, only_when_enabled=True):
2015 2014 """
2016 2015 Checks locking on this repository. If locking is enabled and a lock is
2017 2016 present, returns a tuple of make_lock, locked, locked_by.
2018 2017 make_lock can have 3 states: None (do nothing), True (make a lock) and
2019 2018 False (release the lock). This value is later propagated to the hooks,
2020 2019 which do the actual locking. Think of it as a signal telling the hooks what to do.
2021 2020
2022 2021 """
2023 2022 # TODO: johbo: This is part of the business logic and should be moved
2024 2023 # into the RepositoryModel.
2025 2024
2026 2025 if action not in ('push', 'pull'):
2027 2026 raise ValueError("Invalid action value: %s" % repr(action))
2028 2027
2029 2028 # defines if locked error should be thrown to user
2030 2029 currently_locked = False
2031 2030 # defines if new lock should be made, tri-state
2032 2031 make_lock = None
2033 2032 repo = self
2034 2033 user = User.get(user_id)
2035 2034
2036 2035 lock_info = repo.locked
2037 2036
2038 2037 if repo and (repo.enable_locking or not only_when_enabled):
2039 2038 if action == 'push':
2040 2039 # check if it's already locked; if it is, compare users
2041 2040 locked_by_user_id = lock_info[0]
2042 2041 if user.user_id == locked_by_user_id:
2043 2042 log.debug(
2044 2043 'Got `push` action from user %s, now unlocking', user)
2045 2044 # unlock if we have push from user who locked
2046 2045 make_lock = False
2047 2046 else:
2048 2047 # we're not the same user who locked it, reject with the
2049 2048 # code defined in settings (default is HTTP 423 Locked)
2050 2049 log.debug('Repo %s is currently locked by %s', repo, user)
2051 2050 currently_locked = True
2052 2051 elif action == 'pull':
2053 2052 # [0] user [1] date
2054 2053 if lock_info[0] and lock_info[1]:
2055 2054 log.debug('Repo %s is currently locked by %s', repo, user)
2056 2055 currently_locked = True
2057 2056 else:
2058 2057 log.debug('Setting lock on repo %s by %s', repo, user)
2059 2058 make_lock = True
2060 2059
2061 2060 else:
2062 2061 log.debug('Repository %s does not have locking enabled', repo)
2063 2062
2064 2063 log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
2065 2064 make_lock, currently_locked, lock_info)
2066 2065
2067 2066 from rhodecode.lib.auth import HasRepoPermissionAny
2068 2067 perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
2069 2068 if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
2070 2069 # if we don't have at least write permission we cannot make a lock
2071 2070 log.debug('lock state reset back to FALSE due to lack '
2072 2071 'of at least write permission')
2073 2072 make_lock = False
2074 2073
2075 2074 return make_lock, currently_locked, lock_info
2076 2075
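# Illustrative sketch (assumption, not part of the original code): how a
# caller might consume the tri-state result; `user_id` is hypothetical.
#
#   make_lock, locked, locked_by = repo.get_locking_state('push', user_id)
#   if make_lock is True:    pass   # hooks should acquire the lock
#   elif make_lock is False: pass   # hooks should release the lock
#   # make_lock is None -> do nothing; `locked` True means reject (HTTP 423 style)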
2077 2076 @property
2078 2077 def last_db_change(self):
2079 2078 return self.updated_on
2080 2079
2081 2080 @property
2082 2081 def clone_uri_hidden(self):
2083 2082 clone_uri = self.clone_uri
2084 2083 if clone_uri:
2085 2084 import urlobject
2086 2085 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2087 2086 if url_obj.password:
2088 2087 clone_uri = url_obj.with_password('*****')
2089 2088 return clone_uri
2090 2089
2091 2090 def clone_url(self, **override):
2092 2091 from rhodecode.model.settings import SettingsModel
2093 2092
2094 2093 uri_tmpl = None
2095 2094 if 'with_id' in override:
2096 2095 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2097 2096 del override['with_id']
2098 2097
2099 2098 if 'uri_tmpl' in override:
2100 2099 uri_tmpl = override['uri_tmpl']
2101 2100 del override['uri_tmpl']
2102 2101
2103 2102 # we didn't override our tmpl from **overrides
2104 2103 if not uri_tmpl:
2105 2104 rc_config = SettingsModel().get_all_settings(cache=True)
2106 2105 uri_tmpl = rc_config.get(
2107 2106 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2108 2107
2109 2108 request = get_current_request()
2110 2109 return get_clone_url(request=request,
2111 2110 uri_tmpl=uri_tmpl,
2112 2111 repo_name=self.repo_name,
2113 2112 repo_id=self.repo_id, **override)
2114 2113
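# Illustrative sketch (assumption) of the clone_url overrides; the custom
# template below is hypothetical.
#
#   repo.clone_url()                    # uses rhodecode_clone_uri_tmpl or DEFAULT_CLONE_URI
#   repo.clone_url(with_id=True)        # uses DEFAULT_CLONE_URI_ID ('.../_{repoid}')
#   repo.clone_url(uri_tmpl='{scheme}://{netloc}/{repo}')   # fully custom template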
2115 2114 def set_state(self, state):
2116 2115 self.repo_state = state
2117 2116 Session().add(self)
2118 2117 #==========================================================================
2119 2118 # SCM PROPERTIES
2120 2119 #==========================================================================
2121 2120
2122 2121 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2123 2122 return get_commit_safe(
2124 2123 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2125 2124
2126 2125 def get_changeset(self, rev=None, pre_load=None):
2127 2126 warnings.warn("Use get_commit", DeprecationWarning)
2128 2127 commit_id = None
2129 2128 commit_idx = None
2130 2129 if isinstance(rev, basestring):
2131 2130 commit_id = rev
2132 2131 else:
2133 2132 commit_idx = rev
2134 2133 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2135 2134 pre_load=pre_load)
2136 2135
2137 2136 def get_landing_commit(self):
2138 2137 """
2139 2138 Returns the landing commit, or the tip if the landing commit doesn't exist
2140 2139 """
2141 2140 _rev_type, _rev = self.landing_rev
2142 2141 commit = self.get_commit(_rev)
2143 2142 if isinstance(commit, EmptyCommit):
2144 2143 return self.get_commit()
2145 2144 return commit
2146 2145
2147 2146 def update_commit_cache(self, cs_cache=None, config=None):
2148 2147 """
2149 2148 Update cache of last changeset for repository, keys should be::
2150 2149
2151 2150 short_id
2152 2151 raw_id
2153 2152 revision
2154 2153 parents
2155 2154 message
2156 2155 date
2157 2156 author
2158 2157
2159 2158 :param cs_cache:
2160 2159 """
2161 2160 from rhodecode.lib.vcs.backends.base import BaseChangeset
2162 2161 if cs_cache is None:
2163 2162 # use no-cache version here
2164 2163 scm_repo = self.scm_instance(cache=False, config=config)
2165 2164 if scm_repo:
2166 2165 cs_cache = scm_repo.get_commit(
2167 2166 pre_load=["author", "date", "message", "parents"])
2168 2167 else:
2169 2168 cs_cache = EmptyCommit()
2170 2169
2171 2170 if isinstance(cs_cache, BaseChangeset):
2172 2171 cs_cache = cs_cache.__json__()
2173 2172
2174 2173 def is_outdated(new_cs_cache):
2175 2174 if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
2176 2175 new_cs_cache['revision'] != self.changeset_cache['revision']):
2177 2176 return True
2178 2177 return False
2179 2178
2180 2179 # check if we maybe already have the latest cached revision
2181 2180 if is_outdated(cs_cache) or not self.changeset_cache:
2182 2181 _default = datetime.datetime.fromtimestamp(0)
2183 2182 last_change = cs_cache.get('date') or _default
2184 2183 log.debug('updated repo %s with new cs cache %s',
2185 2184 self.repo_name, cs_cache)
2186 2185 self.updated_on = last_change
2187 2186 self.changeset_cache = cs_cache
2188 2187 Session().add(self)
2189 2188 Session().commit()
2190 2189 else:
2191 2190 log.debug('Skipping update_commit_cache for repo:`%s` '
2192 2191 'commit already with latest changes', self.repo_name)
2193 2192
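# Illustrative sketch (assumption, hypothetical values): cs_cache is a plain
# dict shaped like EmptyCommit().__json__(), with the keys listed above.
#
#   repo.update_commit_cache(cs_cache={
#       'raw_id': 'deadbeef...', 'short_id': 'deadbeef', 'revision': 42,
#       'parents': [], 'message': 'fix', 'date': datetime.datetime.now(),
#       'author': 'dev <dev@example.com>'})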
2194 2193 @property
2195 2194 def tip(self):
2196 2195 return self.get_commit('tip')
2197 2196
2198 2197 @property
2199 2198 def author(self):
2200 2199 return self.tip.author
2201 2200
2202 2201 @property
2203 2202 def last_change(self):
2204 2203 return self.scm_instance().last_change
2205 2204
2206 2205 def get_comments(self, revisions=None):
2207 2206 """
2208 2207 Returns comments for this repository grouped by revisions
2209 2208
2210 2209 :param revisions: filter query by revisions only
2211 2210 """
2212 2211 cmts = ChangesetComment.query()\
2213 2212 .filter(ChangesetComment.repo == self)
2214 2213 if revisions:
2215 2214 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2216 2215 grouped = collections.defaultdict(list)
2217 2216 for cmt in cmts.all():
2218 2217 grouped[cmt.revision].append(cmt)
2219 2218 return grouped
2220 2219
2221 2220 def statuses(self, revisions=None):
2222 2221 """
2223 2222 Returns statuses for this repository
2224 2223
2225 2224 :param revisions: list of revisions to get statuses for
2226 2225 """
2227 2226 statuses = ChangesetStatus.query()\
2228 2227 .filter(ChangesetStatus.repo == self)\
2229 2228 .filter(ChangesetStatus.version == 0)
2230 2229
2231 2230 if revisions:
2232 2231 # Try doing the filtering in chunks to avoid hitting limits
2233 2232 size = 500
2234 2233 status_results = []
2235 2234 for chunk in xrange(0, len(revisions), size):
2236 2235 status_results += statuses.filter(
2237 2236 ChangesetStatus.revision.in_(
2238 2237 revisions[chunk: chunk+size])
2239 2238 ).all()
2240 2239 else:
2241 2240 status_results = statuses.all()
2242 2241
2243 2242 grouped = {}
2244 2243
2245 2244 # maybe we have an open pull request without a status yet?
2246 2245 stat = ChangesetStatus.STATUS_UNDER_REVIEW
2247 2246 status_lbl = ChangesetStatus.get_status_lbl(stat)
2248 2247 for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
2249 2248 for rev in pr.revisions:
2250 2249 pr_id = pr.pull_request_id
2251 2250 pr_repo = pr.target_repo.repo_name
2252 2251 grouped[rev] = [stat, status_lbl, pr_id, pr_repo]
2253 2252
2254 2253 for stat in status_results:
2255 2254 pr_id = pr_repo = None
2256 2255 if stat.pull_request:
2257 2256 pr_id = stat.pull_request.pull_request_id
2258 2257 pr_repo = stat.pull_request.target_repo.repo_name
2259 2258 grouped[stat.revision] = [str(stat.status), stat.status_lbl,
2260 2259 pr_id, pr_repo]
2261 2260 return grouped
2262 2261
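# Illustrative sketch (assumption) of the grouped result shape returned by
# statuses(); raw ids, pr id and repo name are hypothetical.
#
#   {'<raw_id>': ['approved', 'Approved', <pr_id or None>, '<target repo name or None>'], ...}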
2263 2262 # ==========================================================================
2264 2263 # SCM CACHE INSTANCE
2265 2264 # ==========================================================================
2266 2265
2267 2266 def scm_instance(self, **kwargs):
2268 2267 import rhodecode
2269 2268
2270 2269 # Passing a config will not hit the cache; currently this is only used
2271 2270 # for repo2dbmapper
2272 2271 config = kwargs.pop('config', None)
2273 2272 cache = kwargs.pop('cache', None)
2274 2273 full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
2275 2274 # if cache is NOT defined, use the global default; otherwise we have full
2276 2275 # control over the cache behaviour
2277 2276 if cache is None and full_cache and not config:
2278 2277 return self._get_instance_cached()
2279 2278 return self._get_instance(cache=bool(cache), config=config)
2280 2279
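# Illustrative sketch (assumption) of the caching behaviour; `custom_cfg`
# is hypothetical.
#
#   repo.scm_instance()                   # cached path, when vcs_full_cache is enabled
#   repo.scm_instance(cache=False)        # explicit cache control, bypasses _get_instance_cached
#   repo.scm_instance(config=custom_cfg)  # a config is given: skips the long_term cache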
2281 2280 def _get_instance_cached(self):
2282 2281 @cache_region('long_term')
2283 2282 def _get_repo(cache_key):
2284 2283 return self._get_instance()
2285 2284
2286 2285 invalidator_context = CacheKey.repo_context_cache(
2287 2286 _get_repo, self.repo_name, None, thread_scoped=True)
2288 2287
2289 2288 with invalidator_context as context:
2290 2289 context.invalidate()
2291 2290 repo = context.compute()
2292 2291
2293 2292 return repo
2294 2293
2295 2294 def _get_instance(self, cache=True, config=None):
2296 2295 config = config or self._config
2297 2296 custom_wire = {
2298 2297 'cache': cache # controls the vcs.remote cache
2299 2298 }
2300 2299 repo = get_vcs_instance(
2301 2300 repo_path=safe_str(self.repo_full_path),
2302 2301 config=config,
2303 2302 with_wire=custom_wire,
2304 2303 create=False,
2305 2304 _vcs_alias=self.repo_type)
2306 2305
2307 2306 return repo
2308 2307
2309 2308 def __json__(self):
2310 2309 return {'landing_rev': self.landing_rev}
2311 2310
2312 2311 def get_dict(self):
2313 2312
2314 2313 # Since we transformed `repo_name` to a hybrid property, we need to
2315 2314 # keep compatibility with the code which uses `repo_name` field.
2316 2315
2317 2316 result = super(Repository, self).get_dict()
2318 2317 result['repo_name'] = result.pop('_repo_name', None)
2319 2318 return result
2320 2319
2321 2320
2322 2321 class RepoGroup(Base, BaseModel):
2323 2322 __tablename__ = 'groups'
2324 2323 __table_args__ = (
2325 2324 UniqueConstraint('group_name', 'group_parent_id'),
2326 2325 CheckConstraint('group_id != group_parent_id'),
2327 2326 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2328 2327 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2329 2328 )
2330 2329 __mapper_args__ = {'order_by': 'group_name'}
2331 2330
2332 2331 CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups
2333 2332
2334 2333 group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2335 2334 group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
2336 2335 group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
2337 2336 group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
2338 2337 enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
2339 2338 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
2340 2339 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
2341 2340 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
2342 2341 personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
2343 2342
2344 2343 repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
2345 2344 users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
2346 2345 parent_group = relationship('RepoGroup', remote_side=group_id)
2347 2346 user = relationship('User')
2348 2347 integrations = relationship('Integration',
2349 2348 cascade="all, delete, delete-orphan")
2350 2349
2351 2350 def __init__(self, group_name='', parent_group=None):
2352 2351 self.group_name = group_name
2353 2352 self.parent_group = parent_group
2354 2353
2355 2354 def __unicode__(self):
2356 2355 return u"<%s('id:%s:%s')>" % (
2357 2356 self.__class__.__name__, self.group_id, self.group_name)
2358 2357
2359 2358 @hybrid_property
2360 2359 def description_safe(self):
2361 2360 from rhodecode.lib import helpers as h
2362 2361 return h.escape(self.group_description)
2363 2362
2364 2363 @classmethod
2365 2364 def _generate_choice(cls, repo_group):
2366 2365 from webhelpers.html import literal as _literal
2367 2366 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2368 2367 return repo_group.group_id, _name(repo_group.full_path_splitted)
2369 2368
2370 2369 @classmethod
2371 2370 def groups_choices(cls, groups=None, show_empty_group=True):
2372 2371 if not groups:
2373 2372 groups = cls.query().all()
2374 2373
2375 2374 repo_groups = []
2376 2375 if show_empty_group:
2377 2376 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2378 2377
2379 2378 repo_groups.extend([cls._generate_choice(x) for x in groups])
2380 2379
2381 2380 repo_groups = sorted(
2382 2381 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2383 2382 return repo_groups
2384 2383
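# Illustrative sketch (assumption, hypothetical group ids and names) of the
# select2 choices produced for nested groups:
#
#   RepoGroup.groups_choices()
#   # -> [(-1, u'-- No parent --'), (3, u'docs'), (7, u'docs/api'), ...]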
2385 2384 @classmethod
2386 2385 def url_sep(cls):
2387 2386 return URL_SEP
2388 2387
2389 2388 @classmethod
2390 2389 def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
2391 2390 if case_insensitive:
2392 2391 gr = cls.query().filter(func.lower(cls.group_name)
2393 2392 == func.lower(group_name))
2394 2393 else:
2395 2394 gr = cls.query().filter(cls.group_name == group_name)
2396 2395 if cache:
2397 2396 name_key = _hash_key(group_name)
2398 2397 gr = gr.options(
2399 2398 FromCache("sql_cache_short", "get_group_%s" % name_key))
2400 2399 return gr.scalar()
2401 2400
2402 2401 @classmethod
2403 2402 def get_user_personal_repo_group(cls, user_id):
2404 2403 user = User.get(user_id)
2405 2404 if user.username == User.DEFAULT_USER:
2406 2405 return None
2407 2406
2408 2407 return cls.query()\
2409 2408 .filter(cls.personal == true()) \
2410 2409 .filter(cls.user == user).scalar()
2411 2410
2412 2411 @classmethod
2413 2412 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2414 2413 case_insensitive=True):
2415 2414 q = RepoGroup.query()
2416 2415
2417 2416 if not isinstance(user_id, Optional):
2418 2417 q = q.filter(RepoGroup.user_id == user_id)
2419 2418
2420 2419 if not isinstance(group_id, Optional):
2421 2420 q = q.filter(RepoGroup.group_parent_id == group_id)
2422 2421
2423 2422 if case_insensitive:
2424 2423 q = q.order_by(func.lower(RepoGroup.group_name))
2425 2424 else:
2426 2425 q = q.order_by(RepoGroup.group_name)
2427 2426 return q.all()
2428 2427
2429 2428 @property
2430 2429 def parents(self):
2431 2430 parents_recursion_limit = 10
2432 2431 groups = []
2433 2432 if self.parent_group is None:
2434 2433 return groups
2435 2434 cur_gr = self.parent_group
2436 2435 groups.insert(0, cur_gr)
2437 2436 cnt = 0
2438 2437 while 1:
2439 2438 cnt += 1
2440 2439 gr = getattr(cur_gr, 'parent_group', None)
2441 2440 cur_gr = cur_gr.parent_group
2442 2441 if gr is None:
2443 2442 break
2444 2443 if cnt == parents_recursion_limit:
2445 2444 # this will prevent accidental infinite loops
2446 2445 log.error(('more than %s parents found for group %s, stopping '
2447 2446 'recursive parent fetching' % (parents_recursion_limit, self)))
2448 2447 break
2449 2448
2450 2449 groups.insert(0, gr)
2451 2450 return groups
2452 2451
2453 2452 @property
2454 2453 def last_db_change(self):
2455 2454 return self.updated_on
2456 2455
2457 2456 @property
2458 2457 def children(self):
2459 2458 return RepoGroup.query().filter(RepoGroup.parent_group == self)
2460 2459
2461 2460 @property
2462 2461 def name(self):
2463 2462 return self.group_name.split(RepoGroup.url_sep())[-1]
2464 2463
2465 2464 @property
2466 2465 def full_path(self):
2467 2466 return self.group_name
2468 2467
2469 2468 @property
2470 2469 def full_path_splitted(self):
2471 2470 return self.group_name.split(RepoGroup.url_sep())
2472 2471
2473 2472 @property
2474 2473 def repositories(self):
2475 2474 return Repository.query()\
2476 2475 .filter(Repository.group == self)\
2477 2476 .order_by(Repository.repo_name)
2478 2477
2479 2478 @property
2480 2479 def repositories_recursive_count(self):
2481 2480 cnt = self.repositories.count()
2482 2481
2483 2482 def children_count(group):
2484 2483 cnt = 0
2485 2484 for child in group.children:
2486 2485 cnt += child.repositories.count()
2487 2486 cnt += children_count(child)
2488 2487 return cnt
2489 2488
2490 2489 return cnt + children_count(self)
2491 2490
2492 2491 def _recursive_objects(self, include_repos=True):
2493 2492 all_ = []
2494 2493
2495 2494 def _get_members(root_gr):
2496 2495 if include_repos:
2497 2496 for r in root_gr.repositories:
2498 2497 all_.append(r)
2499 2498 childs = root_gr.children.all()
2500 2499 if childs:
2501 2500 for gr in childs:
2502 2501 all_.append(gr)
2503 2502 _get_members(gr)
2504 2503
2505 2504 _get_members(self)
2506 2505 return [self] + all_
2507 2506
2508 2507 def recursive_groups_and_repos(self):
2509 2508 """
2510 2509 Recursive return all groups, with repositories in those groups
2511 2510 Recursively returns all groups, with the repositories in those groups
2512 2511 return self._recursive_objects()
2513 2512
2514 2513 def recursive_groups(self):
2515 2514 """
2516 2515 Returns all child groups of this group, including children of children
2517 2516 """
2518 2517 return self._recursive_objects(include_repos=False)
2519 2518
2520 2519 def get_new_name(self, group_name):
2521 2520 """
2522 2521 returns new full group name based on parent and new name
2523 2522
2524 2523 :param group_name:
2525 2524 """
2526 2525 path_prefix = (self.parent_group.full_path_splitted if
2527 2526 self.parent_group else [])
2528 2527 return RepoGroup.url_sep().join(path_prefix + [group_name])
2529 2528
2530 2529 def permissions(self, with_admins=True, with_owner=True):
2531 2530 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2532 2531 q = q.options(joinedload(UserRepoGroupToPerm.group),
2533 2532 joinedload(UserRepoGroupToPerm.user),
2534 2533 joinedload(UserRepoGroupToPerm.permission),)
2535 2534
2536 2535 # get owners, admins and their permissions. We rewrite the SQLAlchemy
2537 2536 # objects into named tuples because the SQLAlchemy session keeps a global
2538 2537 # reference, so changing one object would propagate to all others.
2539 2538 # Without this, if an admin is also the owner, a change to admin_row
2540 2539 # would propagate to both objects.
2541 2540 perm_rows = []
2542 2541 for _usr in q.all():
2543 2542 usr = AttributeDict(_usr.user.get_dict())
2544 2543 usr.permission = _usr.permission.permission_name
2545 2544 perm_rows.append(usr)
2546 2545
2547 2546 # filter the perm rows by 'default' first and then sort them by
2548 2547 # admin,write,read,none permissions sorted again alphabetically in
2549 2548 # each group
2550 2549 perm_rows = sorted(perm_rows, key=display_user_sort)
2551 2550
2552 2551 _admin_perm = 'group.admin'
2553 2552 owner_row = []
2554 2553 if with_owner:
2555 2554 usr = AttributeDict(self.user.get_dict())
2556 2555 usr.owner_row = True
2557 2556 usr.permission = _admin_perm
2558 2557 owner_row.append(usr)
2559 2558
2560 2559 super_admin_rows = []
2561 2560 if with_admins:
2562 2561 for usr in User.get_all_super_admins():
2563 2562 # if this admin is also owner, don't double the record
2564 2563 if usr.user_id == owner_row[0].user_id:
2565 2564 owner_row[0].admin_row = True
2566 2565 else:
2567 2566 usr = AttributeDict(usr.get_dict())
2568 2567 usr.admin_row = True
2569 2568 usr.permission = _admin_perm
2570 2569 super_admin_rows.append(usr)
2571 2570
2572 2571 return super_admin_rows + owner_row + perm_rows
2573 2572
2574 2573 def permission_user_groups(self):
2575 2574 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2576 2575 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2577 2576 joinedload(UserGroupRepoGroupToPerm.users_group),
2578 2577 joinedload(UserGroupRepoGroupToPerm.permission),)
2579 2578
2580 2579 perm_rows = []
2581 2580 for _user_group in q.all():
2582 2581 usr = AttributeDict(_user_group.users_group.get_dict())
2583 2582 usr.permission = _user_group.permission.permission_name
2584 2583 perm_rows.append(usr)
2585 2584
2586 2585 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2587 2586 return perm_rows
2588 2587
2589 2588 def get_api_data(self):
2590 2589 """
2591 2590 Common function for generating api data
2592 2591
2593 2592 """
2594 2593 group = self
2595 2594 data = {
2596 2595 'group_id': group.group_id,
2597 2596 'group_name': group.group_name,
2598 2597 'group_description': group.description_safe,
2599 2598 'parent_group': group.parent_group.group_name if group.parent_group else None,
2600 2599 'repositories': [x.repo_name for x in group.repositories],
2601 2600 'owner': group.user.username,
2602 2601 }
2603 2602 return data
2604 2603
2605 2604
2606 2605 class Permission(Base, BaseModel):
2607 2606 __tablename__ = 'permissions'
2608 2607 __table_args__ = (
2609 2608 Index('p_perm_name_idx', 'permission_name'),
2610 2609 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2611 2610 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
2612 2611 )
2613 2612 PERMS = [
2614 2613 ('hg.admin', _('RhodeCode Super Administrator')),
2615 2614
2616 2615 ('repository.none', _('Repository no access')),
2617 2616 ('repository.read', _('Repository read access')),
2618 2617 ('repository.write', _('Repository write access')),
2619 2618 ('repository.admin', _('Repository admin access')),
2620 2619
2621 2620 ('group.none', _('Repository group no access')),
2622 2621 ('group.read', _('Repository group read access')),
2623 2622 ('group.write', _('Repository group write access')),
2624 2623 ('group.admin', _('Repository group admin access')),
2625 2624
2626 2625 ('usergroup.none', _('User group no access')),
2627 2626 ('usergroup.read', _('User group read access')),
2628 2627 ('usergroup.write', _('User group write access')),
2629 2628 ('usergroup.admin', _('User group admin access')),
2630 2629
2631 2630 ('hg.repogroup.create.false', _('Repository Group creation disabled')),
2632 2631 ('hg.repogroup.create.true', _('Repository Group creation enabled')),
2633 2632
2634 2633 ('hg.usergroup.create.false', _('User Group creation disabled')),
2635 2634 ('hg.usergroup.create.true', _('User Group creation enabled')),
2636 2635
2637 2636 ('hg.create.none', _('Repository creation disabled')),
2638 2637 ('hg.create.repository', _('Repository creation enabled')),
2639 2638 ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
2640 2639 ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),
2641 2640
2642 2641 ('hg.fork.none', _('Repository forking disabled')),
2643 2642 ('hg.fork.repository', _('Repository forking enabled')),
2644 2643
2645 2644 ('hg.register.none', _('Registration disabled')),
2646 2645 ('hg.register.manual_activate', _('User Registration with manual account activation')),
2647 2646 ('hg.register.auto_activate', _('User Registration with automatic account activation')),
2648 2647
2649 2648 ('hg.password_reset.enabled', _('Password reset enabled')),
2650 2649 ('hg.password_reset.hidden', _('Password reset hidden')),
2651 2650 ('hg.password_reset.disabled', _('Password reset disabled')),
2652 2651
2653 2652 ('hg.extern_activate.manual', _('Manual activation of external account')),
2654 2653 ('hg.extern_activate.auto', _('Automatic activation of external account')),
2655 2654
2656 2655 ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
2657 2656 ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
2658 2657 ]
2659 2658
2660 2659 # definition of system default permissions for DEFAULT user
2661 2660 DEFAULT_USER_PERMISSIONS = [
2662 2661 'repository.read',
2663 2662 'group.read',
2664 2663 'usergroup.read',
2665 2664 'hg.create.repository',
2666 2665 'hg.repogroup.create.false',
2667 2666 'hg.usergroup.create.false',
2668 2667 'hg.create.write_on_repogroup.true',
2669 2668 'hg.fork.repository',
2670 2669 'hg.register.manual_activate',
2671 2670 'hg.password_reset.enabled',
2672 2671 'hg.extern_activate.auto',
2673 2672 'hg.inherit_default_perms.true',
2674 2673 ]
2675 2674
2677 2676 # Weight defines which permissions are more important:
2678 2677 # the higher the number, the more important the permission.
2679 2678 PERM_WEIGHTS = {
2680 2679 'repository.none': 0,
2681 2680 'repository.read': 1,
2682 2681 'repository.write': 3,
2683 2682 'repository.admin': 4,
2684 2683
2685 2684 'group.none': 0,
2686 2685 'group.read': 1,
2687 2686 'group.write': 3,
2688 2687 'group.admin': 4,
2689 2688
2690 2689 'usergroup.none': 0,
2691 2690 'usergroup.read': 1,
2692 2691 'usergroup.write': 3,
2693 2692 'usergroup.admin': 4,
2694 2693
2695 2694 'hg.repogroup.create.false': 0,
2696 2695 'hg.repogroup.create.true': 1,
2697 2696
2698 2697 'hg.usergroup.create.false': 0,
2699 2698 'hg.usergroup.create.true': 1,
2700 2699
2701 2700 'hg.fork.none': 0,
2702 2701 'hg.fork.repository': 1,
2703 2702 'hg.create.none': 0,
2704 2703 'hg.create.repository': 1
2705 2704 }
2706 2705
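# Illustrative sketch (assumption, not how the original code necessarily does
# it): picking the strongest permission using the weights above.
#
#   max(['repository.read', 'repository.write'],
#       key=Permission.PERM_WEIGHTS.get)   # -> 'repository.write'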
2707 2706 permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2708 2707 permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
2709 2708 permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
2710 2709
2711 2710 def __unicode__(self):
2712 2711 return u"<%s('%s:%s')>" % (
2713 2712 self.__class__.__name__, self.permission_id, self.permission_name
2714 2713 )
2715 2714
2716 2715 @classmethod
2717 2716 def get_by_key(cls, key):
2718 2717 return cls.query().filter(cls.permission_name == key).scalar()
2719 2718
2720 2719 @classmethod
2721 2720 def get_default_repo_perms(cls, user_id, repo_id=None):
2722 2721 q = Session().query(UserRepoToPerm, Repository, Permission)\
2723 2722 .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
2724 2723 .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
2725 2724 .filter(UserRepoToPerm.user_id == user_id)
2726 2725 if repo_id:
2727 2726 q = q.filter(UserRepoToPerm.repository_id == repo_id)
2728 2727 return q.all()
2729 2728
2730 2729 @classmethod
2731 2730 def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
2732 2731 q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
2733 2732 .join(
2734 2733 Permission,
2735 2734 UserGroupRepoToPerm.permission_id == Permission.permission_id)\
2736 2735 .join(
2737 2736 Repository,
2738 2737 UserGroupRepoToPerm.repository_id == Repository.repo_id)\
2739 2738 .join(
2740 2739 UserGroup,
2741 2740 UserGroupRepoToPerm.users_group_id ==
2742 2741 UserGroup.users_group_id)\
2743 2742 .join(
2744 2743 UserGroupMember,
2745 2744 UserGroupRepoToPerm.users_group_id ==
2746 2745 UserGroupMember.users_group_id)\
2747 2746 .filter(
2748 2747 UserGroupMember.user_id == user_id,
2749 2748 UserGroup.users_group_active == true())
2750 2749 if repo_id:
2751 2750 q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
2752 2751 return q.all()
2753 2752
2754 2753 @classmethod
2755 2754 def get_default_group_perms(cls, user_id, repo_group_id=None):
2756 2755 q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
2757 2756 .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
2758 2757 .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
2759 2758 .filter(UserRepoGroupToPerm.user_id == user_id)
2760 2759 if repo_group_id:
2761 2760 q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
2762 2761 return q.all()
2763 2762
2764 2763 @classmethod
2765 2764 def get_default_group_perms_from_user_group(
2766 2765 cls, user_id, repo_group_id=None):
2767 2766 q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
2768 2767 .join(
2769 2768 Permission,
2770 2769 UserGroupRepoGroupToPerm.permission_id ==
2771 2770 Permission.permission_id)\
2772 2771 .join(
2773 2772 RepoGroup,
2774 2773 UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
2775 2774 .join(
2776 2775 UserGroup,
2777 2776 UserGroupRepoGroupToPerm.users_group_id ==
2778 2777 UserGroup.users_group_id)\
2779 2778 .join(
2780 2779 UserGroupMember,
2781 2780 UserGroupRepoGroupToPerm.users_group_id ==
2782 2781 UserGroupMember.users_group_id)\
2783 2782 .filter(
2784 2783 UserGroupMember.user_id == user_id,
2785 2784 UserGroup.users_group_active == true())
2786 2785 if repo_group_id:
2787 2786 q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
2788 2787 return q.all()
2789 2788
2790 2789 @classmethod
2791 2790 def get_default_user_group_perms(cls, user_id, user_group_id=None):
2792 2791 q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
2793 2792 .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
2794 2793 .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
2795 2794 .filter(UserUserGroupToPerm.user_id == user_id)
2796 2795 if user_group_id:
2797 2796 q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
2798 2797 return q.all()
2799 2798
2800 2799 @classmethod
2801 2800 def get_default_user_group_perms_from_user_group(
2802 2801 cls, user_id, user_group_id=None):
2803 2802 TargetUserGroup = aliased(UserGroup, name='target_user_group')
2804 2803 q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
2805 2804 .join(
2806 2805 Permission,
2807 2806 UserGroupUserGroupToPerm.permission_id ==
2808 2807 Permission.permission_id)\
2809 2808 .join(
2810 2809 TargetUserGroup,
2811 2810 UserGroupUserGroupToPerm.target_user_group_id ==
2812 2811 TargetUserGroup.users_group_id)\
2813 2812 .join(
2814 2813 UserGroup,
2815 2814 UserGroupUserGroupToPerm.user_group_id ==
2816 2815 UserGroup.users_group_id)\
2817 2816 .join(
2818 2817 UserGroupMember,
2819 2818 UserGroupUserGroupToPerm.user_group_id ==
2820 2819 UserGroupMember.users_group_id)\
2821 2820 .filter(
2822 2821 UserGroupMember.user_id == user_id,
2823 2822 UserGroup.users_group_active == true())
2824 2823 if user_group_id:
2825 2824 q = q.filter(
2826 2825 UserGroupUserGroupToPerm.user_group_id == user_group_id)
2827 2826
2828 2827 return q.all()
2829 2828
2830 2829
2831 2830 class UserRepoToPerm(Base, BaseModel):
2832 2831 __tablename__ = 'repo_to_perm'
2833 2832 __table_args__ = (
2834 2833 UniqueConstraint('user_id', 'repository_id', 'permission_id'),
2835 2834 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2836 2835 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2837 2836 )
2838 2837 repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2839 2838 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2840 2839 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2841 2840 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2842 2841
2843 2842 user = relationship('User')
2844 2843 repository = relationship('Repository')
2845 2844 permission = relationship('Permission')
2846 2845
2847 2846 @classmethod
2848 2847 def create(cls, user, repository, permission):
2849 2848 n = cls()
2850 2849 n.user = user
2851 2850 n.repository = repository
2852 2851 n.permission = permission
2853 2852 Session().add(n)
2854 2853 return n
2855 2854
2856 2855 def __unicode__(self):
2857 2856 return u'<%s => %s >' % (self.user, self.repository)
2858 2857
2859 2858
2860 2859 class UserUserGroupToPerm(Base, BaseModel):
2861 2860 __tablename__ = 'user_user_group_to_perm'
2862 2861 __table_args__ = (
2863 2862 UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
2864 2863 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2865 2864 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2866 2865 )
2867 2866 user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2868 2867 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2869 2868 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2870 2869 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2871 2870
2872 2871 user = relationship('User')
2873 2872 user_group = relationship('UserGroup')
2874 2873 permission = relationship('Permission')
2875 2874
2876 2875 @classmethod
2877 2876 def create(cls, user, user_group, permission):
2878 2877 n = cls()
2879 2878 n.user = user
2880 2879 n.user_group = user_group
2881 2880 n.permission = permission
2882 2881 Session().add(n)
2883 2882 return n
2884 2883
2885 2884 def __unicode__(self):
2886 2885 return u'<%s => %s >' % (self.user, self.user_group)
2887 2886
2888 2887
2889 2888 class UserToPerm(Base, BaseModel):
2890 2889 __tablename__ = 'user_to_perm'
2891 2890 __table_args__ = (
2892 2891 UniqueConstraint('user_id', 'permission_id'),
2893 2892 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2894 2893 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2895 2894 )
2896 2895 user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2897 2896 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2898 2897 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2899 2898
2900 2899 user = relationship('User')
2901 2900 permission = relationship('Permission', lazy='joined')
2902 2901
2903 2902 def __unicode__(self):
2904 2903 return u'<%s => %s >' % (self.user, self.permission)
2905 2904
2906 2905
2907 2906 class UserGroupRepoToPerm(Base, BaseModel):
2908 2907 __tablename__ = 'users_group_repo_to_perm'
2909 2908 __table_args__ = (
2910 2909 UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
2911 2910 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2912 2911 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2913 2912 )
2914 2913 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2915 2914 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2916 2915 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2917 2916 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
2918 2917
2919 2918 users_group = relationship('UserGroup')
2920 2919 permission = relationship('Permission')
2921 2920 repository = relationship('Repository')
2922 2921
2923 2922 @classmethod
2924 2923 def create(cls, users_group, repository, permission):
2925 2924 n = cls()
2926 2925 n.users_group = users_group
2927 2926 n.repository = repository
2928 2927 n.permission = permission
2929 2928 Session().add(n)
2930 2929 return n
2931 2930
2932 2931 def __unicode__(self):
2933 2932 return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2934 2933
2935 2934
2936 2935 class UserGroupUserGroupToPerm(Base, BaseModel):
2937 2936 __tablename__ = 'user_group_user_group_to_perm'
2938 2937 __table_args__ = (
2939 2938 UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
2940 2939 CheckConstraint('target_user_group_id != user_group_id'),
2941 2940 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2942 2941 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2943 2942 )
2944 2943 user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2945 2944 target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2946 2945 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2947 2946 user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2948 2947
2949 2948 target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
2950 2949 user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
2951 2950 permission = relationship('Permission')
2952 2951
2953 2952 @classmethod
2954 2953 def create(cls, target_user_group, user_group, permission):
2955 2954 n = cls()
2956 2955 n.target_user_group = target_user_group
2957 2956 n.user_group = user_group
2958 2957 n.permission = permission
2959 2958 Session().add(n)
2960 2959 return n
2961 2960
2962 2961 def __unicode__(self):
2963 2962 return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
2964 2963
2965 2964
2966 2965 class UserGroupToPerm(Base, BaseModel):
2967 2966 __tablename__ = 'users_group_to_perm'
2968 2967 __table_args__ = (
2969 2968 UniqueConstraint('users_group_id', 'permission_id',),
2970 2969 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2971 2970 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2972 2971 )
2973 2972 users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2974 2973 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
2975 2974 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2976 2975
2977 2976 users_group = relationship('UserGroup')
2978 2977 permission = relationship('Permission')
2979 2978
2980 2979
2981 2980 class UserRepoGroupToPerm(Base, BaseModel):
2982 2981 __tablename__ = 'user_repo_group_to_perm'
2983 2982 __table_args__ = (
2984 2983 UniqueConstraint('user_id', 'group_id', 'permission_id'),
2985 2984 {'extend_existing': True, 'mysql_engine': 'InnoDB',
2986 2985 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
2987 2986 )
2988 2987
2989 2988 group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
2990 2989 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
2991 2990 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
2992 2991 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
2993 2992
2994 2993 user = relationship('User')
2995 2994 group = relationship('RepoGroup')
2996 2995 permission = relationship('Permission')
2997 2996
2998 2997 @classmethod
2999 2998 def create(cls, user, repository_group, permission):
3000 2999 n = cls()
3001 3000 n.user = user
3002 3001 n.group = repository_group
3003 3002 n.permission = permission
3004 3003 Session().add(n)
3005 3004 return n
3006 3005
3007 3006
3008 3007 class UserGroupRepoGroupToPerm(Base, BaseModel):
3009 3008 __tablename__ = 'users_group_repo_group_to_perm'
3010 3009 __table_args__ = (
3011 3010 UniqueConstraint('users_group_id', 'group_id'),
3012 3011 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3013 3012 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3014 3013 )
3015 3014
3016 3015 users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3017 3016 users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
3018 3017 group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
3019 3018 permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
3020 3019
3021 3020 users_group = relationship('UserGroup')
3022 3021 permission = relationship('Permission')
3023 3022 group = relationship('RepoGroup')
3024 3023
3025 3024 @classmethod
3026 3025 def create(cls, user_group, repository_group, permission):
3027 3026 n = cls()
3028 3027 n.users_group = user_group
3029 3028 n.group = repository_group
3030 3029 n.permission = permission
3031 3030 Session().add(n)
3032 3031 return n
3033 3032
3034 3033 def __unicode__(self):
3035 3034 return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3036 3035
3037 3036
3038 3037 class Statistics(Base, BaseModel):
3039 3038 __tablename__ = 'statistics'
3040 3039 __table_args__ = (
3041 3040 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3042 3041 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3043 3042 )
3044 3043 stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3045 3044 repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
3046 3045 stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
3047 3046 commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
3048 3047 commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
3049 3048 languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data
3050 3049
3051 3050 repository = relationship('Repository', single_parent=True)
3052 3051
3053 3052
3054 3053 class UserFollowing(Base, BaseModel):
3055 3054 __tablename__ = 'user_followings'
3056 3055 __table_args__ = (
3057 3056 UniqueConstraint('user_id', 'follows_repository_id'),
3058 3057 UniqueConstraint('user_id', 'follows_user_id'),
3059 3058 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3060 3059 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3061 3060 )
3062 3061
3063 3062 user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3064 3063 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
3065 3064 follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
3066 3065 follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
3067 3066 follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
3068 3067
3069 3068 user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')
3070 3069
3071 3070 follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
3072 3071 follows_repository = relationship('Repository', order_by='Repository.repo_name')
3073 3072
3074 3073 @classmethod
3075 3074 def get_repo_followers(cls, repo_id):
3076 3075 return cls.query().filter(cls.follows_repo_id == repo_id)
3077 3076
3078 3077
3079 3078 class CacheKey(Base, BaseModel):
3080 3079 __tablename__ = 'cache_invalidation'
3081 3080 __table_args__ = (
3082 3081 UniqueConstraint('cache_key'),
3083 3082 Index('key_idx', 'cache_key'),
3084 3083 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3085 3084 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3086 3085 )
3087 3086 CACHE_TYPE_ATOM = 'ATOM'
3088 3087 CACHE_TYPE_RSS = 'RSS'
3089 3088 CACHE_TYPE_README = 'README'
3090 3089
3091 3090 cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
3092 3091 cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
3093 3092 cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
3094 3093 cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)
3095 3094
3096 3095 def __init__(self, cache_key, cache_args=''):
3097 3096 self.cache_key = cache_key
3098 3097 self.cache_args = cache_args
3099 3098 self.cache_active = False
3100 3099
3101 3100 def __unicode__(self):
3102 3101 return u"<%s('%s:%s[%s]')>" % (
3103 3102 self.__class__.__name__,
3104 3103 self.cache_id, self.cache_key, self.cache_active)
3105 3104
3106 3105 def _cache_key_partition(self):
3107 3106 prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
3108 3107 return prefix, repo_name, suffix
3109 3108
3110 3109 def get_prefix(self):
3111 3110 """
3112 3111 Try to extract prefix from existing cache key. The key could consist
3113 3112 of prefix, repo_name, suffix
3114 3113 """
3115 3114 # this returns prefix, repo_name, suffix
3116 3115 return self._cache_key_partition()[0]
3117 3116
3118 3117 def get_suffix(self):
3119 3118 """
3120 3119 Get the suffix that might have been used in _get_cache_key to
3121 3120 generate self.cache_key. Only used for informational purposes
3122 3121 in repo_edit.mako.
3123 3122 """
3124 3123 # prefix, repo_name, suffix
3125 3124 return self._cache_key_partition()[2]
3126 3125
3127 3126 @classmethod
3128 3127 def delete_all_cache(cls):
3129 3128 """
3130 3129 Delete all cache keys from database.
3131 3130 Should only be run when all instances are down and all entries
3132 3131 thus stale.
3133 3132 """
3134 3133 cls.query().delete()
3135 3134 Session().commit()
3136 3135
3137 3136 @classmethod
3138 3137 def get_cache_key(cls, repo_name, cache_type):
3139 3138 """
3140 3139
3141 3140 Generate a cache key for this RhodeCode instance process.
3142 3141 The prefix will most likely be the process id, or the instance_id
3143 3142 explicitly set in the .ini file.
3144 3143 """
3145 3144 import rhodecode
3146 3145 prefix = safe_unicode(rhodecode.CONFIG.get('instance_id') or '')
3147 3146
3148 3147 repo_as_unicode = safe_unicode(repo_name)
3149 3148 key = u'{}_{}'.format(repo_as_unicode, cache_type) \
3150 3149 if cache_type else repo_as_unicode
3151 3150
3152 3151 return u'{}{}'.format(prefix, key)
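
# A minimal usage sketch (the instance_id value and repo name below are
# hypothetical; the key format follows the code above):
#
#   CacheKey.get_cache_key('acme/backend', CacheKey.CACHE_TYPE_README)
#   # with instance_id = 'rc1' -> u'rc1acme/backend_README'
#   CacheKey.get_cache_key('acme/backend', None)
#   # -> u'rc1acme/backend'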
3153 3152
3154 3153 @classmethod
3155 3154 def set_invalidate(cls, repo_name, delete=False):
3156 3155 """
3157 3156 Mark all caches of a repo as invalid in the database.
3158 3157 """
3159 3158
3160 3159 try:
3161 3160 qry = Session().query(cls).filter(cls.cache_args == repo_name)
3162 3161 if delete:
3163 3162 log.debug('cache objects deleted for repo %s',
3164 3163 safe_str(repo_name))
3165 3164 qry.delete()
3166 3165 else:
3167 3166 log.debug('cache objects marked as invalid for repo %s',
3168 3167 safe_str(repo_name))
3169 3168 qry.update({"cache_active": False})
3170 3169
3171 3170 Session().commit()
3172 3171 except Exception:
3173 3172 log.exception(
3174 3173 'Cache key invalidation failed for repository %s',
3175 3174 safe_str(repo_name))
3176 3175 Session().rollback()
3177 3176
3178 3177 @classmethod
3179 3178 def get_active_cache(cls, cache_key):
3180 3179 inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
3181 3180 if inv_obj:
3182 3181 return inv_obj
3183 3182 return None
3184 3183
3185 3184 @classmethod
3186 3185 def repo_context_cache(cls, compute_func, repo_name, cache_type,
3187 3186 thread_scoped=False):
3188 3187 """
3189 3188 @cache_region('long_term')
3190 3189 def _heavy_calculation(cache_key):
3191 3190 return 'result'
3192 3191
3193 3192 cache_context = CacheKey.repo_context_cache(
3194 3193 _heavy_calculation, repo_name, cache_type)
3195 3194
3196 3195 with cache_context as context:
3197 3196 context.invalidate()
3198 3197 computed = context.compute()
3199 3198
3200 3199 assert computed == 'result'
3201 3200 """
3202 3201 from rhodecode.lib import caches
3203 3202 return caches.InvalidationContext(
3204 3203 compute_func, repo_name, cache_type, thread_scoped=thread_scoped)
3205 3204
3206 3205
3207 3206 class ChangesetComment(Base, BaseModel):
3208 3207 __tablename__ = 'changeset_comments'
3209 3208 __table_args__ = (
3210 3209 Index('cc_revision_idx', 'revision'),
3211 3210 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3212 3211 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3213 3212 )
3214 3213
3215 3214 COMMENT_OUTDATED = u'comment_outdated'
3216 3215 COMMENT_TYPE_NOTE = u'note'
3217 3216 COMMENT_TYPE_TODO = u'todo'
3218 3217 COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]
3219 3218
3220 3219 comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
3221 3220 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3222 3221 revision = Column('revision', String(40), nullable=True)
3223 3222 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3224 3223 pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
3225 3224 line_no = Column('line_no', Unicode(10), nullable=True)
3226 3225 hl_lines = Column('hl_lines', Unicode(512), nullable=True)
3227 3226 f_path = Column('f_path', Unicode(1000), nullable=True)
3228 3227 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
3229 3228 text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
3230 3229 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3231 3230 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3232 3231 renderer = Column('renderer', Unicode(64), nullable=True)
3233 3232 display_state = Column('display_state', Unicode(128), nullable=True)
3234 3233
3235 3234 comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
3236 3235 resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
3237 3236 resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
3238 3237 author = relationship('User', lazy='joined')
3239 3238 repo = relationship('Repository')
3240 3239 status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
3241 3240 pull_request = relationship('PullRequest', lazy='joined')
3242 3241 pull_request_version = relationship('PullRequestVersion')
3243 3242
3244 3243 @classmethod
3245 3244 def get_users(cls, revision=None, pull_request_id=None):
3246 3245 """
3247 3246 Returns the users associated with this ChangesetComment, i.e. those
3248 3247 who actually commented
3249 3248
3250 3249 :param cls:
3251 3250 :param revision:
3252 3251 """
3253 3252 q = Session().query(User)\
3254 3253 .join(ChangesetComment.author)
3255 3254 if revision:
3256 3255 q = q.filter(cls.revision == revision)
3257 3256 elif pull_request_id:
3258 3257 q = q.filter(cls.pull_request_id == pull_request_id)
3259 3258 return q.all()
3260 3259
3261 3260 @classmethod
3262 3261 def get_index_from_version(cls, pr_version, versions):
3263 3262 num_versions = [x.pull_request_version_id for x in versions]
3264 3263 try:
3265 3264 return num_versions.index(pr_version) + 1
3266 3265 except (IndexError, ValueError):
3267 3266 return
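
# Hedged example (the version ids are hypothetical): for versions with ids
# [10, 11, 12], get_index_from_version(11, versions) returns 2 (1-based),
# while an id that is not present returns None via the ValueError branch.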
3268 3267
3269 3268 @property
3270 3269 def outdated(self):
3271 3270 return self.display_state == self.COMMENT_OUTDATED
3272 3271
3273 3272 def outdated_at_version(self, version):
3274 3273 """
3275 3274 Checks if comment is outdated for given pull request version
3276 3275 """
3277 3276 return self.outdated and self.pull_request_version_id != version
3278 3277
3279 3278 def older_than_version(self, version):
3280 3279 """
3281 3280 Checks if the comment was made in an earlier version than the given one
3282 3281 """
3283 3282 if version is None:
3284 3283 return self.pull_request_version_id is not None
3285 3284
3286 3285 return self.pull_request_version_id < version
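
# Hedged example of how the two version checks differ (ids are hypothetical):
#
#   comment.pull_request_version_id = 2
#   comment.older_than_version(3)     # True, since 2 < 3
#   comment.older_than_version(None)  # True, the comment has a version id at all
#   comment.outdated_at_version(3)    # True only if the comment is also outdated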
3287 3286
3288 3287 @property
3289 3288 def resolved(self):
3290 3289 return self.resolved_by[0] if self.resolved_by else None
3291 3290
3292 3291 @property
3293 3292 def is_todo(self):
3294 3293 return self.comment_type == self.COMMENT_TYPE_TODO
3295 3294
3296 3295 @property
3297 3296 def is_inline(self):
3298 3297 return self.line_no and self.f_path
3299 3298
3300 3299 def get_index_version(self, versions):
3301 3300 return self.get_index_from_version(
3302 3301 self.pull_request_version_id, versions)
3303 3302
3304 3303 def __repr__(self):
3305 3304 if self.comment_id:
3306 3305 return '<DB:Comment #%s>' % self.comment_id
3307 3306 else:
3308 3307 return '<DB:Comment at %#x>' % id(self)
3309 3308
3310 3309 def get_api_data(self):
3311 3310 comment = self
3312 3311 data = {
3313 3312 'comment_id': comment.comment_id,
3314 3313 'comment_type': comment.comment_type,
3315 3314 'comment_text': comment.text,
3316 3315 'comment_status': comment.status_change,
3317 3316 'comment_f_path': comment.f_path,
3318 3317 'comment_lineno': comment.line_no,
3319 3318 'comment_author': comment.author,
3320 3319 'comment_created_on': comment.created_on
3321 3320 }
3322 3321 return data
3323 3322
3324 3323 def __json__(self):
3325 3324 data = dict()
3326 3325 data.update(self.get_api_data())
3327 3326 return data
3328 3327
3329 3328
3330 3329 class ChangesetStatus(Base, BaseModel):
3331 3330 __tablename__ = 'changeset_statuses'
3332 3331 __table_args__ = (
3333 3332 Index('cs_revision_idx', 'revision'),
3334 3333 Index('cs_version_idx', 'version'),
3335 3334 UniqueConstraint('repo_id', 'revision', 'version'),
3336 3335 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3337 3336 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3338 3337 )
3339 3338 STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
3340 3339 STATUS_APPROVED = 'approved'
3341 3340 STATUS_REJECTED = 'rejected'
3342 3341 STATUS_UNDER_REVIEW = 'under_review'
3343 3342
3344 3343 STATUSES = [
3345 3344 (STATUS_NOT_REVIEWED, _("Not Reviewed")), # (no icon) and default
3346 3345 (STATUS_APPROVED, _("Approved")),
3347 3346 (STATUS_REJECTED, _("Rejected")),
3348 3347 (STATUS_UNDER_REVIEW, _("Under Review")),
3349 3348 ]
3350 3349
3351 3350 changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
3352 3351 repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
3353 3352 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
3354 3353 revision = Column('revision', String(40), nullable=False)
3355 3354 status = Column('status', String(128), nullable=False, default=DEFAULT)
3356 3355 changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
3357 3356 modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
3358 3357 version = Column('version', Integer(), nullable=False, default=0)
3359 3358 pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
3360 3359
3361 3360 author = relationship('User', lazy='joined')
3362 3361 repo = relationship('Repository')
3363 3362 comment = relationship('ChangesetComment', lazy='joined')
3364 3363 pull_request = relationship('PullRequest', lazy='joined')
3365 3364
3366 3365 def __unicode__(self):
3367 3366 return u"<%s('%s[v%s]:%s')>" % (
3368 3367 self.__class__.__name__,
3369 3368 self.status, self.version, self.author
3370 3369 )
3371 3370
3372 3371 @classmethod
3373 3372 def get_status_lbl(cls, value):
3374 3373 return dict(cls.STATUSES).get(value)
3375 3374
3376 3375 @property
3377 3376 def status_lbl(self):
3378 3377 return ChangesetStatus.get_status_lbl(self.status)
3379 3378
3380 3379 def get_api_data(self):
3381 3380 status = self
3382 3381 data = {
3383 3382 'status_id': status.changeset_status_id,
3384 3383 'status': status.status,
3385 3384 }
3386 3385 return data
3387 3386
3388 3387 def __json__(self):
3389 3388 data = dict()
3390 3389 data.update(self.get_api_data())
3391 3390 return data
3392 3391
3393 3392
3394 3393 class _PullRequestBase(BaseModel):
3395 3394 """
3396 3395 Common attributes of pull request and version entries.
3397 3396 """
3398 3397
3399 3398 # .status values
3400 3399 STATUS_NEW = u'new'
3401 3400 STATUS_OPEN = u'open'
3402 3401 STATUS_CLOSED = u'closed'
3403 3402
3404 3403 title = Column('title', Unicode(255), nullable=True)
3405 3404 description = Column(
3406 3405 'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
3407 3406 nullable=True)
3408 3407 # new/open/closed status of pull request (not approve/reject/etc)
3409 3408 status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
3410 3409 created_on = Column(
3411 3410 'created_on', DateTime(timezone=False), nullable=False,
3412 3411 default=datetime.datetime.now)
3413 3412 updated_on = Column(
3414 3413 'updated_on', DateTime(timezone=False), nullable=False,
3415 3414 default=datetime.datetime.now)
3416 3415
3417 3416 @declared_attr
3418 3417 def user_id(cls):
3419 3418 return Column(
3420 3419 "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
3421 3420 unique=None)
3422 3421
3423 3422 # 500 revisions max
3424 3423 _revisions = Column(
3425 3424 'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))
3426 3425
3427 3426 @declared_attr
3428 3427 def source_repo_id(cls):
3429 3428 # TODO: dan: rename column to source_repo_id
3430 3429 return Column(
3431 3430 'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3432 3431 nullable=False)
3433 3432
3434 3433 source_ref = Column('org_ref', Unicode(255), nullable=False)
3435 3434
3436 3435 @declared_attr
3437 3436 def target_repo_id(cls):
3438 3437 # TODO: dan: rename column to target_repo_id
3439 3438 return Column(
3440 3439 'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
3441 3440 nullable=False)
3442 3441
3443 3442 target_ref = Column('other_ref', Unicode(255), nullable=False)
3444 3443 _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)
3445 3444
3446 3445 # TODO: dan: rename column to last_merge_source_rev
3447 3446 _last_merge_source_rev = Column(
3448 3447 'last_merge_org_rev', String(40), nullable=True)
3449 3448 # TODO: dan: rename column to last_merge_target_rev
3450 3449 _last_merge_target_rev = Column(
3451 3450 'last_merge_other_rev', String(40), nullable=True)
3452 3451 _last_merge_status = Column('merge_status', Integer(), nullable=True)
3453 3452 merge_rev = Column('merge_rev', String(40), nullable=True)
3454 3453
3455 3454 reviewer_data = Column(
3456 3455 'reviewer_data_json', MutationObj.as_mutable(
3457 3456 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
3458 3457
3459 3458 @property
3460 3459 def reviewer_data_json(self):
3461 3460 return json.dumps(self.reviewer_data)
3462 3461
3463 3462 @hybrid_property
3464 3463 def description_safe(self):
3465 3464 from rhodecode.lib import helpers as h
3466 3465 return h.escape(self.description)
3467 3466
3468 3467 @hybrid_property
3469 3468 def revisions(self):
3470 3469 return self._revisions.split(':') if self._revisions else []
3471 3470
3472 3471 @revisions.setter
3473 3472 def revisions(self, val):
3474 3473 self._revisions = ':'.join(val)
3475 3474
3476 3475 @hybrid_property
3477 3476 def last_merge_status(self):
3478 3477 return safe_int(self._last_merge_status)
3479 3478
3480 3479 @last_merge_status.setter
3481 3480 def last_merge_status(self, val):
3482 3481 self._last_merge_status = val
3483 3482
3484 3483 @declared_attr
3485 3484 def author(cls):
3486 3485 return relationship('User', lazy='joined')
3487 3486
3488 3487 @declared_attr
3489 3488 def source_repo(cls):
3490 3489 return relationship(
3491 3490 'Repository',
3492 3491 primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)
3493 3492
3494 3493 @property
3495 3494 def source_ref_parts(self):
3496 3495 return self.unicode_to_reference(self.source_ref)
3497 3496
3498 3497 @declared_attr
3499 3498 def target_repo(cls):
3500 3499 return relationship(
3501 3500 'Repository',
3502 3501 primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)
3503 3502
3504 3503 @property
3505 3504 def target_ref_parts(self):
3506 3505 return self.unicode_to_reference(self.target_ref)
3507 3506
3508 3507 @property
3509 3508 def shadow_merge_ref(self):
3510 3509 return self.unicode_to_reference(self._shadow_merge_ref)
3511 3510
3512 3511 @shadow_merge_ref.setter
3513 3512 def shadow_merge_ref(self, ref):
3514 3513 self._shadow_merge_ref = self.reference_to_unicode(ref)
3515 3514
3516 3515 def unicode_to_reference(self, raw):
3517 3516 """
3518 3517 Convert a unicode (or string) to a reference object.
3519 3518 If the value evaluates to False it returns None.
3520 3519 """
3521 3520 if raw:
3522 3521 refs = raw.split(':')
3523 3522 return Reference(*refs)
3524 3523 else:
3525 3524 return None
3526 3525
3527 3526 def reference_to_unicode(self, ref):
3528 3527 """
3529 3528 Convert a reference object to unicode.
3530 3529 If reference is None it returns None.
3531 3530 """
3532 3531 if ref:
3533 3532 return u':'.join(ref)
3534 3533 else:
3535 3534 return None
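
# A rough round-trip sketch, assuming refs are serialized as
# 'type:name:commit_id' strings and Reference is a namedtuple over those
# fields (the values below are hypothetical):
#
#   ref = self.unicode_to_reference(u'branch:default:deadbeef')
#   # -> Reference with type=u'branch', name=u'default', commit_id=u'deadbeef'
#   self.reference_to_unicode(ref)
#   # -> u'branch:default:deadbeef'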
3536 3535
3537 3536 def get_api_data(self, with_merge_state=True):
3538 3537 from rhodecode.model.pull_request import PullRequestModel
3539 3538
3540 3539 pull_request = self
3541 3540 if with_merge_state:
3542 3541 merge_status = PullRequestModel().merge_status(pull_request)
3543 3542 merge_state = {
3544 3543 'status': merge_status[0],
3545 3544 'message': safe_unicode(merge_status[1]),
3546 3545 }
3547 3546 else:
3548 3547 merge_state = {'status': 'not_available',
3549 3548 'message': 'not_available'}
3550 3549
3551 3550 merge_data = {
3552 3551 'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
3553 3552 'reference': (
3554 3553 pull_request.shadow_merge_ref._asdict()
3555 3554 if pull_request.shadow_merge_ref else None),
3556 3555 }
3557 3556
3558 3557 data = {
3559 3558 'pull_request_id': pull_request.pull_request_id,
3560 3559 'url': PullRequestModel().get_url(pull_request),
3561 3560 'title': pull_request.title,
3562 3561 'description': pull_request.description,
3563 3562 'status': pull_request.status,
3564 3563 'created_on': pull_request.created_on,
3565 3564 'updated_on': pull_request.updated_on,
3566 3565 'commit_ids': pull_request.revisions,
3567 3566 'review_status': pull_request.calculated_review_status(),
3568 3567 'mergeable': merge_state,
3569 3568 'source': {
3570 3569 'clone_url': pull_request.source_repo.clone_url(),
3571 3570 'repository': pull_request.source_repo.repo_name,
3572 3571 'reference': {
3573 3572 'name': pull_request.source_ref_parts.name,
3574 3573 'type': pull_request.source_ref_parts.type,
3575 3574 'commit_id': pull_request.source_ref_parts.commit_id,
3576 3575 },
3577 3576 },
3578 3577 'target': {
3579 3578 'clone_url': pull_request.target_repo.clone_url(),
3580 3579 'repository': pull_request.target_repo.repo_name,
3581 3580 'reference': {
3582 3581 'name': pull_request.target_ref_parts.name,
3583 3582 'type': pull_request.target_ref_parts.type,
3584 3583 'commit_id': pull_request.target_ref_parts.commit_id,
3585 3584 },
3586 3585 },
3587 3586 'merge': merge_data,
3588 3587 'author': pull_request.author.get_api_data(include_secrets=False,
3589 3588 details='basic'),
3590 3589 'reviewers': [
3591 3590 {
3592 3591 'user': reviewer.get_api_data(include_secrets=False,
3593 3592 details='basic'),
3594 3593 'reasons': reasons,
3595 3594 'review_status': st[0][1].status if st else 'not_reviewed',
3596 3595 }
3597 for reviewer, reasons, mandatory, st in
3596 for obj, reviewer, reasons, mandatory, st in
3598 3597 pull_request.reviewers_statuses()
3599 3598 ]
3600 3599 }
3601 3600
3602 3601 return data
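
# Note on the reviewers change above: with the new voting rules,
# reviewers_statuses() now yields 5-tuples whose first element appears to be
# the PullRequestReviewers row itself ('obj'), followed by the user, the
# reasons, the mandatory flag and the collected statuses. A hedged consumer
# sketch (variable names are assumptions):
#
#   for reviewers_entry, user, reasons, mandatory, statuses in pr.reviewers_statuses():
#       group_rule = reviewers_entry.rule_user_group_data()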
3603 3602
3604 3603
3605 3604 class PullRequest(Base, _PullRequestBase):
3606 3605 __tablename__ = 'pull_requests'
3607 3606 __table_args__ = (
3608 3607 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3609 3608 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3610 3609 )
3611 3610
3612 3611 pull_request_id = Column(
3613 3612 'pull_request_id', Integer(), nullable=False, primary_key=True)
3614 3613
3615 3614 def __repr__(self):
3616 3615 if self.pull_request_id:
3617 3616 return '<DB:PullRequest #%s>' % self.pull_request_id
3618 3617 else:
3619 3618 return '<DB:PullRequest at %#x>' % id(self)
3620 3619
3621 3620 reviewers = relationship('PullRequestReviewers',
3622 3621 cascade="all, delete, delete-orphan")
3623 3622 statuses = relationship('ChangesetStatus',
3624 3623 cascade="all, delete, delete-orphan")
3625 3624 comments = relationship('ChangesetComment',
3626 3625 cascade="all, delete, delete-orphan")
3627 3626 versions = relationship('PullRequestVersion',
3628 3627 cascade="all, delete, delete-orphan",
3629 3628 lazy='dynamic')
3630 3629
3631 3630 @classmethod
3632 3631 def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
3633 3632 internal_methods=None):
3634 3633
3635 3634 class PullRequestDisplay(object):
3636 3635 """
3637 3636 Special object wrapper for showing PullRequest data via Versions.
3638 3637 It mimics the PR object as closely as possible. This is a read-only
3639 3638 object, just for display.
3640 3639 """
3641 3640
3642 3641 def __init__(self, attrs, internal=None):
3643 3642 self.attrs = attrs
3644 3643 # internal attributes have priority over the ones given via attrs
3645 3644 self.internal = internal or ['versions']
3646 3645
3647 3646 def __getattr__(self, item):
3648 3647 if item in self.internal:
3649 3648 return getattr(self, item)
3650 3649 try:
3651 3650 return self.attrs[item]
3652 3651 except KeyError:
3653 3652 raise AttributeError(
3654 3653 '%s object has no attribute %s' % (self, item))
3655 3654
3656 3655 def __repr__(self):
3657 3656 return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')
3658 3657
3659 3658 def versions(self):
3660 3659 return pull_request_obj.versions.order_by(
3661 3660 PullRequestVersion.pull_request_version_id).all()
3662 3661
3663 3662 def is_closed(self):
3664 3663 return pull_request_obj.is_closed()
3665 3664
3666 3665 @property
3667 3666 def pull_request_version_id(self):
3668 3667 return getattr(pull_request_obj, 'pull_request_version_id', None)
3669 3668
3670 3669 attrs = StrictAttributeDict(pull_request_obj.get_api_data())
3671 3670
3672 3671 attrs.author = StrictAttributeDict(
3673 3672 pull_request_obj.author.get_api_data())
3674 3673 if pull_request_obj.target_repo:
3675 3674 attrs.target_repo = StrictAttributeDict(
3676 3675 pull_request_obj.target_repo.get_api_data())
3677 3676 attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url
3678 3677
3679 3678 if pull_request_obj.source_repo:
3680 3679 attrs.source_repo = StrictAttributeDict(
3681 3680 pull_request_obj.source_repo.get_api_data())
3682 3681 attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url
3683 3682
3684 3683 attrs.source_ref_parts = pull_request_obj.source_ref_parts
3685 3684 attrs.target_ref_parts = pull_request_obj.target_ref_parts
3686 3685 attrs.revisions = pull_request_obj.revisions
3687 3686
3688 3687 attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
3689 3688 attrs.reviewer_data = org_pull_request_obj.reviewer_data
3690 3689 attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json
3691 3690
3692 3691 return PullRequestDisplay(attrs, internal=internal_methods)
3693 3692
3694 3693 def is_closed(self):
3695 3694 return self.status == self.STATUS_CLOSED
3696 3695
3697 3696 def __json__(self):
3698 3697 return {
3699 3698 'revisions': self.revisions,
3700 3699 }
3701 3700
3702 3701 def calculated_review_status(self):
3703 3702 from rhodecode.model.changeset_status import ChangesetStatusModel
3704 3703 return ChangesetStatusModel().calculated_review_status(self)
3705 3704
3706 3705 def reviewers_statuses(self):
3707 3706 from rhodecode.model.changeset_status import ChangesetStatusModel
3708 3707 return ChangesetStatusModel().reviewers_statuses(self)
3709 3708
3710 3709 @property
3711 3710 def workspace_id(self):
3712 3711 from rhodecode.model.pull_request import PullRequestModel
3713 3712 return PullRequestModel()._workspace_id(self)
3714 3713
3715 3714 def get_shadow_repo(self):
3716 3715 workspace_id = self.workspace_id
3717 3716 vcs_obj = self.target_repo.scm_instance()
3718 3717 shadow_repository_path = vcs_obj._get_shadow_repository_path(
3719 3718 workspace_id)
3720 3719 return vcs_obj._get_shadow_instance(shadow_repository_path)
3721 3720
3722 3721
3723 3722 class PullRequestVersion(Base, _PullRequestBase):
3724 3723 __tablename__ = 'pull_request_versions'
3725 3724 __table_args__ = (
3726 3725 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3727 3726 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3728 3727 )
3729 3728
3730 3729 pull_request_version_id = Column(
3731 3730 'pull_request_version_id', Integer(), nullable=False, primary_key=True)
3732 3731 pull_request_id = Column(
3733 3732 'pull_request_id', Integer(),
3734 3733 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3735 3734 pull_request = relationship('PullRequest')
3736 3735
3737 3736 def __repr__(self):
3738 3737 if self.pull_request_version_id:
3739 3738 return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
3740 3739 else:
3741 3740 return '<DB:PullRequestVersion at %#x>' % id(self)
3742 3741
3743 3742 @property
3744 3743 def reviewers(self):
3745 3744 return self.pull_request.reviewers
3746 3745
3747 3746 @property
3748 3747 def versions(self):
3749 3748 return self.pull_request.versions
3750 3749
3751 3750 def is_closed(self):
3752 3751 # calculate from original
3753 3752 return self.pull_request.status == self.STATUS_CLOSED
3754 3753
3755 3754 def calculated_review_status(self):
3756 3755 return self.pull_request.calculated_review_status()
3757 3756
3758 3757 def reviewers_statuses(self):
3759 3758 return self.pull_request.reviewers_statuses()
3760 3759
3761 3760
3762 3761 class PullRequestReviewers(Base, BaseModel):
3763 3762 __tablename__ = 'pull_request_reviewers'
3764 3763 __table_args__ = (
3765 3764 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3766 3765 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3767 3766 )
3768 3767
3769 3768 @hybrid_property
3770 3769 def reasons(self):
3771 3770 if not self._reasons:
3772 3771 return []
3773 3772 return self._reasons
3774 3773
3775 3774 @reasons.setter
3776 3775 def reasons(self, val):
3777 3776 val = val or []
3778 3777 if any(not isinstance(x, basestring) for x in val):
3779 3778 raise Exception('invalid reasons type, must be list of strings')
3780 3779 self._reasons = val
3781 3780
3782 3781 pull_requests_reviewers_id = Column(
3783 3782 'pull_requests_reviewers_id', Integer(), nullable=False,
3784 3783 primary_key=True)
3785 3784 pull_request_id = Column(
3786 3785 "pull_request_id", Integer(),
3787 3786 ForeignKey('pull_requests.pull_request_id'), nullable=False)
3788 3787 user_id = Column(
3789 3788 "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
3790 3789 _reasons = Column(
3791 3790 'reason', MutationList.as_mutable(
3792 3791 JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))
3792
3793 3793 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
3794 3794 user = relationship('User')
3795 3795 pull_request = relationship('PullRequest')
3796 3796
3797 rule_data = Column(
3798 'rule_data_json',
3799 JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
3800
3801 def rule_user_group_data(self):
3802 """
3803 Returns the voting user group rule data for this reviewer
3804 """
3805
3806 if self.rule_data and 'vote_rule' in self.rule_data:
3807 user_group_data = {}
3808 if 'rule_user_group_entry_id' in self.rule_data:
3809 # means a group with voting rules!
3810 user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
3811 user_group_data['name'] = self.rule_data['rule_name']
3812 user_group_data['vote_rule'] = self.rule_data['vote_rule']
3813
3814 return user_group_data
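
# A hedged sketch of the return value when this reviewer entry came from a
# user-group rule with voting enabled (the rule_data contents below are
# hypothetical, mirroring the keys read above):
#
#   reviewer.rule_data = {'vote_rule': 2,
#                         'rule_user_group_entry_id': 7,
#                         'rule_name': 'backend-team rule'}
#   reviewer.rule_user_group_data()
#   # -> {'id': 7, 'name': 'backend-team rule', 'vote_rule': 2}
#
# Without a 'vote_rule' key the method falls through and returns None.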
3815
3816 def __unicode__(self):
3817 return u"<%s('id:%s')>" % (self.__class__.__name__,
3818 self.pull_requests_reviewers_id)
3819
3797 3820
3798 3821 class Notification(Base, BaseModel):
3799 3822 __tablename__ = 'notifications'
3800 3823 __table_args__ = (
3801 3824 Index('notification_type_idx', 'type'),
3802 3825 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3803 3826 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
3804 3827 )
3805 3828
3806 3829 TYPE_CHANGESET_COMMENT = u'cs_comment'
3807 3830 TYPE_MESSAGE = u'message'
3808 3831 TYPE_MENTION = u'mention'
3809 3832 TYPE_REGISTRATION = u'registration'
3810 3833 TYPE_PULL_REQUEST = u'pull_request'
3811 3834 TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'
3812 3835
3813 3836 notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
3814 3837 subject = Column('subject', Unicode(512), nullable=True)
3815 3838 body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
3816 3839 created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
3817 3840 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3818 3841 type_ = Column('type', Unicode(255))
3819 3842
3820 3843 created_by_user = relationship('User')
3821 3844 notifications_to_users = relationship('UserNotification', lazy='joined',
3822 3845 cascade="all, delete, delete-orphan")
3823 3846
3824 3847 @property
3825 3848 def recipients(self):
3826 3849 return [x.user for x in UserNotification.query()\
3827 3850 .filter(UserNotification.notification == self)\
3828 3851 .order_by(UserNotification.user_id.asc()).all()]
3829 3852
3830 3853 @classmethod
3831 3854 def create(cls, created_by, subject, body, recipients, type_=None):
3832 3855 if type_ is None:
3833 3856 type_ = Notification.TYPE_MESSAGE
3834 3857
3835 3858 notification = cls()
3836 3859 notification.created_by_user = created_by
3837 3860 notification.subject = subject
3838 3861 notification.body = body
3839 3862 notification.type_ = type_
3840 3863 notification.created_on = datetime.datetime.now()
3841 3864
3842 3865 for u in recipients:
3843 3866 assoc = UserNotification()
3844 3867 assoc.notification = notification
3845 3868
3846 3869 # if created_by is among the recipients, mark their notification
3847 3870 # as read
3848 3871 if u.user_id == created_by.user_id:
3849 3872 assoc.read = True
3850 3873
3851 3874 u.notifications.append(assoc)
3852 3875 Session().add(notification)
3853 3876
3854 3877 return notification
3855 3878
3856 3879
3857 3880 class UserNotification(Base, BaseModel):
3858 3881 __tablename__ = 'user_to_notification'
3859 3882 __table_args__ = (
3860 3883 UniqueConstraint('user_id', 'notification_id'),
3861 3884 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3862 3885 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3863 3886 )
3864 3887 user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
3865 3888 notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
3866 3889 read = Column('read', Boolean, default=False)
3867 3890 sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)
3868 3891
3869 3892 user = relationship('User', lazy="joined")
3870 3893 notification = relationship('Notification', lazy="joined",
3871 3894 order_by=lambda: Notification.created_on.desc(),)
3872 3895
3873 3896 def mark_as_read(self):
3874 3897 self.read = True
3875 3898 Session().add(self)
3876 3899
3877 3900
3878 3901 class Gist(Base, BaseModel):
3879 3902 __tablename__ = 'gists'
3880 3903 __table_args__ = (
3881 3904 Index('g_gist_access_id_idx', 'gist_access_id'),
3882 3905 Index('g_created_on_idx', 'created_on'),
3883 3906 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3884 3907 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
3885 3908 )
3886 3909 GIST_PUBLIC = u'public'
3887 3910 GIST_PRIVATE = u'private'
3888 3911 DEFAULT_FILENAME = u'gistfile1.txt'
3889 3912
3890 3913 ACL_LEVEL_PUBLIC = u'acl_public'
3891 3914 ACL_LEVEL_PRIVATE = u'acl_private'
3892 3915
3893 3916 gist_id = Column('gist_id', Integer(), primary_key=True)
3894 3917 gist_access_id = Column('gist_access_id', Unicode(250))
3895 3918 gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
3896 3919 gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
3897 3920 gist_expires = Column('gist_expires', Float(53), nullable=False)
3898 3921 gist_type = Column('gist_type', Unicode(128), nullable=False)
3899 3922 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3900 3923 modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
3901 3924 acl_level = Column('acl_level', Unicode(128), nullable=True)
3902 3925
3903 3926 owner = relationship('User')
3904 3927
3905 3928 def __repr__(self):
3906 3929 return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)
3907 3930
3908 3931 @hybrid_property
3909 3932 def description_safe(self):
3910 3933 from rhodecode.lib import helpers as h
3911 3934 return h.escape(self.gist_description)
3912 3935
3913 3936 @classmethod
3914 3937 def get_or_404(cls, id_):
3915 3938 from pyramid.httpexceptions import HTTPNotFound
3916 3939
3917 3940 res = cls.query().filter(cls.gist_access_id == id_).scalar()
3918 3941 if not res:
3919 3942 raise HTTPNotFound()
3920 3943 return res
3921 3944
3922 3945 @classmethod
3923 3946 def get_by_access_id(cls, gist_access_id):
3924 3947 return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()
3925 3948
3926 3949 def gist_url(self):
3927 3950 from rhodecode.model.gist import GistModel
3928 3951 return GistModel().get_url(self)
3929 3952
3930 3953 @classmethod
3931 3954 def base_path(cls):
3932 3955 """
3933 3956 Returns the base path where all gists are stored
3934 3957
3935 3958 :param cls:
3936 3959 """
3937 3960 from rhodecode.model.gist import GIST_STORE_LOC
3938 3961 q = Session().query(RhodeCodeUi)\
3939 3962 .filter(RhodeCodeUi.ui_key == URL_SEP)
3940 3963 q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
3941 3964 return os.path.join(q.one().ui_value, GIST_STORE_LOC)
3942 3965
3943 3966 def get_api_data(self):
3944 3967 """
3945 3968 Common function for generating gist-related data for the API
3946 3969 """
3947 3970 gist = self
3948 3971 data = {
3949 3972 'gist_id': gist.gist_id,
3950 3973 'type': gist.gist_type,
3951 3974 'access_id': gist.gist_access_id,
3952 3975 'description': gist.gist_description,
3953 3976 'url': gist.gist_url(),
3954 3977 'expires': gist.gist_expires,
3955 3978 'created_on': gist.created_on,
3956 3979 'modified_at': gist.modified_at,
3957 3980 'content': None,
3958 3981 'acl_level': gist.acl_level,
3959 3982 }
3960 3983 return data
3961 3984
3962 3985 def __json__(self):
3963 3986 data = dict(
3964 3987 )
3965 3988 data.update(self.get_api_data())
3966 3989 return data
3967 3990 # SCM functions
3968 3991
3969 3992 def scm_instance(self, **kwargs):
3970 3993 full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
3971 3994 return get_vcs_instance(
3972 3995 repo_path=safe_str(full_repo_path), create=False)
3973 3996
3974 3997
3975 3998 class ExternalIdentity(Base, BaseModel):
3976 3999 __tablename__ = 'external_identities'
3977 4000 __table_args__ = (
3978 4001 Index('local_user_id_idx', 'local_user_id'),
3979 4002 Index('external_id_idx', 'external_id'),
3980 4003 {'extend_existing': True, 'mysql_engine': 'InnoDB',
3981 4004 'mysql_charset': 'utf8'})
3982 4005
3983 4006 external_id = Column('external_id', Unicode(255), default=u'',
3984 4007 primary_key=True)
3985 4008 external_username = Column('external_username', Unicode(1024), default=u'')
3986 4009 local_user_id = Column('local_user_id', Integer(),
3987 4010 ForeignKey('users.user_id'), primary_key=True)
3988 4011 provider_name = Column('provider_name', Unicode(255), default=u'',
3989 4012 primary_key=True)
3990 4013 access_token = Column('access_token', String(1024), default=u'')
3991 4014 alt_token = Column('alt_token', String(1024), default=u'')
3992 4015 token_secret = Column('token_secret', String(1024), default=u'')
3993 4016
3994 4017 @classmethod
3995 4018 def by_external_id_and_provider(cls, external_id, provider_name,
3996 4019 local_user_id=None):
3997 4020 """
3998 4021 Returns ExternalIdentity instance based on search params
3999 4022
4000 4023 :param external_id:
4001 4024 :param provider_name:
4002 4025 :return: ExternalIdentity
4003 4026 """
4004 4027 query = cls.query()
4005 4028 query = query.filter(cls.external_id == external_id)
4006 4029 query = query.filter(cls.provider_name == provider_name)
4007 4030 if local_user_id:
4008 4031 query = query.filter(cls.local_user_id == local_user_id)
4009 4032 return query.first()
4010 4033
4011 4034 @classmethod
4012 4035 def user_by_external_id_and_provider(cls, external_id, provider_name):
4013 4036 """
4014 4037 Returns User instance based on search params
4015 4038
4016 4039 :param external_id:
4017 4040 :param provider_name:
4018 4041 :return: User
4019 4042 """
4020 4043 query = User.query()
4021 4044 query = query.filter(cls.external_id == external_id)
4022 4045 query = query.filter(cls.provider_name == provider_name)
4023 4046 query = query.filter(User.user_id == cls.local_user_id)
4024 4047 return query.first()
4025 4048
4026 4049 @classmethod
4027 4050 def by_local_user_id(cls, local_user_id):
4028 4051 """
4029 4052 Returns all tokens for user
4030 4053
4031 4054 :param local_user_id:
4032 4055 :return: ExternalIdentity
4033 4056 """
4034 4057 query = cls.query()
4035 4058 query = query.filter(cls.local_user_id == local_user_id)
4036 4059 return query
4037 4060
4038 4061
4039 4062 class Integration(Base, BaseModel):
4040 4063 __tablename__ = 'integrations'
4041 4064 __table_args__ = (
4042 4065 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4043 4066 'mysql_charset': 'utf8', 'sqlite_autoincrement': True}
4044 4067 )
4045 4068
4046 4069 integration_id = Column('integration_id', Integer(), primary_key=True)
4047 4070 integration_type = Column('integration_type', String(255))
4048 4071 enabled = Column('enabled', Boolean(), nullable=False)
4049 4072 name = Column('name', String(255), nullable=False)
4050 4073 child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
4051 4074 default=False)
4052 4075
4053 4076 settings = Column(
4054 4077 'settings_json', MutationObj.as_mutable(
4055 4078 JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
4056 4079 repo_id = Column(
4057 4080 'repo_id', Integer(), ForeignKey('repositories.repo_id'),
4058 4081 nullable=True, unique=None, default=None)
4059 4082 repo = relationship('Repository', lazy='joined')
4060 4083
4061 4084 repo_group_id = Column(
4062 4085 'repo_group_id', Integer(), ForeignKey('groups.group_id'),
4063 4086 nullable=True, unique=None, default=None)
4064 4087 repo_group = relationship('RepoGroup', lazy='joined')
4065 4088
4066 4089 @property
4067 4090 def scope(self):
4068 4091 if self.repo:
4069 4092 return repr(self.repo)
4070 4093 if self.repo_group:
4071 4094 if self.child_repos_only:
4072 4095 return repr(self.repo_group) + ' (child repos only)'
4073 4096 else:
4074 4097 return repr(self.repo_group) + ' (recursive)'
4075 4098 if self.child_repos_only:
4076 4099 return 'root_repos'
4077 4100 return 'global'
4078 4101
4079 4102 def __repr__(self):
4080 4103 return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4081 4104
4082 4105
4083 4106 class RepoReviewRuleUser(Base, BaseModel):
4084 4107 __tablename__ = 'repo_review_rules_users'
4085 4108 __table_args__ = (
4086 4109 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4087 4110 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4088 4111 )
4112
4089 4113 repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
4090 4114 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4091 4115 user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
4092 4116 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4093 4117 user = relationship('User')
4094 4118
4095 4119 def rule_data(self):
4096 4120 return {
4097 4121 'mandatory': self.mandatory
4098 4122 }
4099 4123
4100 4124
4101 4125 class RepoReviewRuleUserGroup(Base, BaseModel):
4102 4126 __tablename__ = 'repo_review_rules_users_groups'
4103 4127 __table_args__ = (
4104 4128 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4105 4129 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4106 4130 )
4131 VOTE_RULE_ALL = -1
4132
4107 4133 repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
4108 4134 repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
4109 4135 users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
4110 4136 mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
4137 vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
4111 4138 users_group = relationship('UserGroup')
4112 4139
4113 4140 def rule_data(self):
4114 4141 return {
4115 'mandatory': self.mandatory
4142 'mandatory': self.mandatory,
4143 'vote_rule': self.vote_rule
4116 4144 }
4117 4145
4146 @property
4147 def vote_rule_label(self):
4148 if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
4149 return 'all must vote'
4150 else:
4151 return 'min. vote {}'.format(self.vote_rule)
4152
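The new vote_rule column defaults to VOTE_RULE_ALL (-1), meaning every member of the group has to vote; a positive value N relaxes this to "at least N votes from this group". A minimal sketch of the label logic, assuming a plain unsaved model instance:

    # illustrative only - exercises vote_rule_label on an in-memory instance
    entry = RepoReviewRuleUserGroup()

    entry.vote_rule = None                                   # unset
    assert entry.vote_rule_label == 'all must vote'

    entry.vote_rule = RepoReviewRuleUserGroup.VOTE_RULE_ALL  # -1
    assert entry.vote_rule_label == 'all must vote'

    entry.vote_rule = 2                                      # any 2 members suffice
    assert entry.vote_rule_label == 'min. vote 2'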
4118 4153
4119 4154 class RepoReviewRule(Base, BaseModel):
4120 4155 __tablename__ = 'repo_review_rules'
4121 4156 __table_args__ = (
4122 4157 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4123 4158 'mysql_charset': 'utf8', 'sqlite_autoincrement': True,}
4124 4159 )
4125 4160
4126 4161 repo_review_rule_id = Column(
4127 4162 'repo_review_rule_id', Integer(), primary_key=True)
4128 4163 repo_id = Column(
4129 4164 "repo_id", Integer(), ForeignKey('repositories.repo_id'))
4130 4165 repo = relationship('Repository', backref='review_rules')
4131 4166
4132 4167 review_rule_name = Column('review_rule_name', String(255))
4133 4168 _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4134 4169 _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4135 4170 _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*') # glob
4136 4171
4137 4172 use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
4138 4173 forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
4139 4174 forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
4140 4175 forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)
4141 4176
4142 4177 rule_users = relationship('RepoReviewRuleUser')
4143 4178 rule_user_groups = relationship('RepoReviewRuleUserGroup')
4144 4179
4145 4180 def _validate_glob(self, value):
4146 4181 re.compile('^' + glob2re(value) + '$')
4147 4182
4148 4183 @hybrid_property
4149 4184 def source_branch_pattern(self):
4150 4185 return self._branch_pattern or '*'
4151 4186
4152 4187 @source_branch_pattern.setter
4153 4188 def source_branch_pattern(self, value):
4154 4189 self._validate_glob(value)
4155 4190 self._branch_pattern = value or '*'
4156 4191
4157 4192 @hybrid_property
4158 4193 def target_branch_pattern(self):
4159 4194 return self._target_branch_pattern or '*'
4160 4195
4161 4196 @target_branch_pattern.setter
4162 4197 def target_branch_pattern(self, value):
4163 4198 self._validate_glob(value)
4164 4199 self._target_branch_pattern = value or '*'
4165 4200
4166 4201 @hybrid_property
4167 4202 def file_pattern(self):
4168 4203 return self._file_pattern or '*'
4169 4204
4170 4205 @file_pattern.setter
4171 4206 def file_pattern(self, value):
4172 4207 self._validate_glob(value)
4173 4208 self._file_pattern = value or '*'
4174 4209
4175 4210 def matches(self, source_branch, target_branch, files_changed):
4176 4211 """
4177 4212 Check if this review rule matches a branch/files in a pull request
4178 4213
4179 4214 :param branch: branch name for the commit
4180 4215 :param files_changed: list of file paths changed in the pull request
4181 4216 """
4182 4217
4183 4218 source_branch = source_branch or ''
4184 4219 target_branch = target_branch or ''
4185 4220 files_changed = files_changed or []
4186 4221
4187 4222 branch_matches = True
4188 4223 if source_branch or target_branch:
4189 4224 source_branch_regex = re.compile(
4190 4225 '^' + glob2re(self.source_branch_pattern) + '$')
4191 4226 target_branch_regex = re.compile(
4192 4227 '^' + glob2re(self.target_branch_pattern) + '$')
4193 4228
4194 4229 branch_matches = (
4195 4230 bool(source_branch_regex.search(source_branch)) and
4196 4231 bool(target_branch_regex.search(target_branch))
4197 4232 )
4198 4233
4199 4234 files_matches = True
4200 4235 if self.file_pattern != '*':
4201 4236 files_matches = False
4202 4237 file_regex = re.compile(glob2re(self.file_pattern))
4203 4238 for filename in files_changed:
4204 4239 if file_regex.search(filename):
4205 4240 files_matches = True
4206 4241 break
4207 4242
4208 4243 return branch_matches and files_matches
4209 4244
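As a hedged illustration of the matching logic above (branch and file names are made up; an unsaved instance falls back to '*' patterns and relies on the module's glob2re helper):

    # illustrative only - default '*' patterns match everything
    rule = RepoReviewRule()
    assert rule.matches('fix-voting', 'default', ['rhodecode/model/db.py'])

    # narrowing the source branch pattern stops the rule from matching
    rule.source_branch_pattern = 'stable'
    assert not rule.matches('fix-voting', 'default', ['rhodecode/model/db.py'])
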
4210 4245 @property
4211 4246 def review_users(self):
4212 4247 """ Returns the users which this rule applies to """
4213 4248
4214 4249 users = collections.OrderedDict()
4215 4250
4216 4251 for rule_user in self.rule_users:
4217 4252 if rule_user.user.active:
4218 4253 if rule_user.user not in users:
4219 4254 users[rule_user.user.username] = {
4220 4255 'user': rule_user.user,
4221 4256 'source': 'user',
4222 4257 'source_data': {},
4223 4258 'data': rule_user.rule_data()
4224 4259 }
4225 4260
4226 4261 for rule_user_group in self.rule_user_groups:
4227 4262 source_data = {
4263 'user_group_id': rule_user_group.users_group.users_group_id,
4228 4264 'name': rule_user_group.users_group.users_group_name,
4229 4265 'members': len(rule_user_group.users_group.members)
4230 4266 }
4231 4267 for member in rule_user_group.users_group.members:
4232 4268 if member.user.active:
4233 users[member.user.username] = {
4269 key = member.user.username
4270 if key in users:
4271 # skip this member as we already have them;
4272 # this prevents the "first" matched user from being
4273 # overridden by duplicates from multiple groups
4274 continue
4275
4276 users[key] = {
4234 4277 'user': member.user,
4235 4278 'source': 'user_group',
4236 4279 'source_data': source_data,
4237 4280 'data': rule_user_group.rule_data()
4238 4281 }
4239 4282
4240 4283 return users
4241 4284
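For reference, one entry of the mapping returned above might look roughly like this (values are made up; 'data' now carries the group's vote_rule via rule_data()):

    # illustrative shape of users['alice'] for a user-group sourced reviewer
    {
        'user': '<User alice>',
        'source': 'user_group',
        'source_data': {'user_group_id': 4, 'name': 'qa', 'members': 3},
        'data': {'mandatory': True, 'vote_rule': 2},
    }
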
4285 def user_group_vote_rule(self):
4286 rules = []
4287 if self.rule_user_groups:
4288 for user_group in self.rule_user_groups:
4289 rules.append(user_group)
4290 return rules
4291
4242 4292 def __repr__(self):
4243 4293 return '<RepoReviewerRule(id=%r, repo=%r)>' % (
4244 4294 self.repo_review_rule_id, self.repo)
4245 4295
4246 4296
4247 4297 class ScheduleEntry(Base, BaseModel):
4248 4298 __tablename__ = 'schedule_entries'
4249 4299 __table_args__ = (
4250 4300 UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
4251 4301 UniqueConstraint('task_uid', name='s_task_uid_idx'),
4252 4302 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4253 4303 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4254 4304 )
4255 4305 schedule_types = ['crontab', 'timedelta', 'integer']
4256 4306 schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)
4257 4307
4258 4308 schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
4259 4309 schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
4260 4310 schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)
4261 4311
4262 4312 _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
4263 4313 schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))
4264 4314
4265 4315 schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
4266 4316 schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)
4267 4317
4268 4318 # task
4269 4319 task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
4270 4320 task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
4271 4321 task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
4272 4322 task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))
4273 4323
4274 4324 created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
4275 4325 updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)
4276 4326
4277 4327 @hybrid_property
4278 4328 def schedule_type(self):
4279 4329 return self._schedule_type
4280 4330
4281 4331 @schedule_type.setter
4282 4332 def schedule_type(self, val):
4283 4333 if val not in self.schedule_types:
4284 4334 raise ValueError('Value must be one of `{}` and got `{}`'.format(
4285 4335 self.schedule_types, val))
4286 4336
4287 4337 self._schedule_type = val
4288 4338
4289 4339 @classmethod
4290 4340 def get_uid(cls, obj):
4291 4341 args = obj.task_args
4292 4342 kwargs = obj.task_kwargs
4293 4343 if isinstance(args, JsonRaw):
4294 4344 try:
4295 4345 args = json.loads(args)
4296 4346 except ValueError:
4297 4347 args = tuple()
4298 4348
4299 4349 if isinstance(kwargs, JsonRaw):
4300 4350 try:
4301 4351 kwargs = json.loads(kwargs)
4302 4352 except ValueError:
4303 4353 kwargs = dict()
4304 4354
4305 4355 dot_notation = obj.task_dot_notation
4306 4356 val = '.'.join(map(safe_str, [
4307 4357 sorted(dot_notation), args, sorted(kwargs.items())]))
4308 4358 return hashlib.sha1(val).hexdigest()
4309 4359
4310 4360 @classmethod
4311 4361 def get_by_schedule_name(cls, schedule_name):
4312 4362 return cls.query().filter(cls.schedule_name == schedule_name).scalar()
4313 4363
4314 4364 @classmethod
4315 4365 def get_by_schedule_id(cls, schedule_id):
4316 4366 return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()
4317 4367
4318 4368 @property
4319 4369 def task(self):
4320 4370 return self.task_dot_notation
4321 4371
4322 4372 @property
4323 4373 def schedule(self):
4324 4374 from rhodecode.lib.celerylib.utils import raw_2_schedule
4325 4375 schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
4326 4376 return schedule
4327 4377
4328 4378 @property
4329 4379 def args(self):
4330 4380 try:
4331 4381 return list(self.task_args or [])
4332 4382 except ValueError:
4333 4383 return list()
4334 4384
4335 4385 @property
4336 4386 def kwargs(self):
4337 4387 try:
4338 4388 return dict(self.task_kwargs or {})
4339 4389 except ValueError:
4340 4390 return dict()
4341 4391
4342 4392 def _as_raw(self, val):
4343 4393 if hasattr(val, 'de_coerce'):
4344 4394 val = val.de_coerce()
4345 4395 if val:
4346 4396 val = json.dumps(val)
4347 4397
4348 4398 return val
4349 4399
4350 4400 @property
4351 4401 def schedule_definition_raw(self):
4352 4402 return self._as_raw(self.schedule_definition)
4353 4403
4354 4404 @property
4355 4405 def args_raw(self):
4356 4406 return self._as_raw(self.task_args)
4357 4407
4358 4408 @property
4359 4409 def kwargs_raw(self):
4360 4410 return self._as_raw(self.task_kwargs)
4361 4411
4362 4412 def __repr__(self):
4363 4413 return '<DB:ScheduleEntry({}:{})>'.format(
4364 4414 self.schedule_entry_id, self.schedule_name)
4365 4415
4366 4416
4367 4417 @event.listens_for(ScheduleEntry, 'before_update')
4368 4418 def update_task_uid(mapper, connection, target):
4369 4419 target.task_uid = ScheduleEntry.get_uid(target)
4370 4420
4371 4421
4372 4422 @event.listens_for(ScheduleEntry, 'before_insert')
4373 4423 def set_task_uid(mapper, connection, target):
4374 4424 target.task_uid = ScheduleEntry.get_uid(target)
4375 4425
4376 4426
4377 4427 class DbMigrateVersion(Base, BaseModel):
4378 4428 __tablename__ = 'db_migrate_version'
4379 4429 __table_args__ = (
4380 4430 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4381 4431 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4382 4432 )
4383 4433 repository_id = Column('repository_id', String(250), primary_key=True)
4384 4434 repository_path = Column('repository_path', Text)
4385 4435 version = Column('version', Integer)
4386 4436
4387 4437
4388 4438 class DbSession(Base, BaseModel):
4389 4439 __tablename__ = 'db_session'
4390 4440 __table_args__ = (
4391 4441 {'extend_existing': True, 'mysql_engine': 'InnoDB',
4392 4442 'mysql_charset': 'utf8', 'sqlite_autoincrement': True},
4393 4443 )
4394 4444
4395 4445 def __repr__(self):
4396 4446 return '<DB:DbSession({})>'.format(self.id)
4397 4447
4398 4448 id = Column('id', Integer())
4399 4449 namespace = Column('namespace', String(255), primary_key=True)
4400 4450 accessed = Column('accessed', DateTime, nullable=False)
4401 4451 created = Column('created', DateTime, nullable=False)
4402 4452 data = Column('data', PickleType, nullable=False)
@@ -1,614 +1,615 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 these are the form validation classes
23 23 http://formencode.org/module-formencode.validators.html
24 24 for a list of all available validators
25 25
26 26 we can create our own validators
27 27
28 28 The table below outlines the options which can be used in a schema in addition to the validators themselves
29 29 pre_validators [] These validators will be applied before the schema
30 30 chained_validators [] These validators will be applied after the schema
31 31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
32 32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
33 33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
34 34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
35 35
36 36
37 37 <name> = formencode.validators.<name of validator>
38 38 <name> must equal form name
39 39 list=[1,2,3,4,5]
40 40 for SELECT use formencode.All(OneOf(list), Int())
41 41
42 42 """
43 43
44 44 import deform
45 45 import logging
46 46 import formencode
47 47
48 48 from pkg_resources import resource_filename
49 49 from formencode import All, Pipe
50 50
51 51 from pyramid.threadlocal import get_current_request
52 52
53 53 from rhodecode import BACKENDS
54 54 from rhodecode.lib import helpers
55 55 from rhodecode.model import validators as v
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 deform_templates = resource_filename('deform', 'templates')
61 61 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
62 62 search_path = (rhodecode_templates, deform_templates)
63 63
64 64
65 65 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
66 66 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
67 67 def __call__(self, template_name, **kw):
68 68 kw['h'] = helpers
69 69 kw['request'] = get_current_request()
70 70 return self.load(template_name)(**kw)
71 71
72 72
73 73 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
74 74 deform.Form.set_default_renderer(form_renderer)
75 75
76 76
77 77 def LoginForm(localizer):
78 78 _ = localizer
79 79
80 80 class _LoginForm(formencode.Schema):
81 81 allow_extra_fields = True
82 82 filter_extra_fields = True
83 83 username = v.UnicodeString(
84 84 strip=True,
85 85 min=1,
86 86 not_empty=True,
87 87 messages={
88 88 'empty': _(u'Please enter a login'),
89 89 'tooShort': _(u'Enter a value %(min)i characters long or more')
90 90 }
91 91 )
92 92
93 93 password = v.UnicodeString(
94 94 strip=False,
95 95 min=3,
96 96 max=72,
97 97 not_empty=True,
98 98 messages={
99 99 'empty': _(u'Please enter a password'),
100 100 'tooShort': _(u'Enter %(min)i characters or more')}
101 101 )
102 102
103 103 remember = v.StringBoolean(if_missing=False)
104 104
105 105 chained_validators = [v.ValidAuth(localizer)]
106 106 return _LoginForm
107 107
108 108
109 109 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
110 110 old_data = old_data or {}
111 111 available_languages = available_languages or []
112 112 _ = localizer
113 113
114 114 class _UserForm(formencode.Schema):
115 115 allow_extra_fields = True
116 116 filter_extra_fields = True
117 117 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
118 118 v.ValidUsername(localizer, edit, old_data))
119 119 if edit:
120 120 new_password = All(
121 121 v.ValidPassword(localizer),
122 122 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
123 123 )
124 124 password_confirmation = All(
125 125 v.ValidPassword(localizer),
126 126 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
127 127 )
128 128 admin = v.StringBoolean(if_missing=False)
129 129 else:
130 130 password = All(
131 131 v.ValidPassword(localizer),
132 132 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
133 133 )
134 134 password_confirmation = All(
135 135 v.ValidPassword(localizer),
136 136 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
137 137 )
138 138
139 139 password_change = v.StringBoolean(if_missing=False)
140 140 create_repo_group = v.StringBoolean(if_missing=False)
141 141
142 142 active = v.StringBoolean(if_missing=False)
143 143 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
144 144 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
145 145 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
146 146 extern_name = v.UnicodeString(strip=True)
147 147 extern_type = v.UnicodeString(strip=True)
148 148 language = v.OneOf(available_languages, hideList=False,
149 149 testValueList=True, if_missing=None)
150 150 chained_validators = [v.ValidPasswordsMatch(localizer)]
151 151 return _UserForm
152 152
153 153
154 154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
155 155 old_data = old_data or {}
156 156 _ = localizer
157 157
158 158 class _UserGroupForm(formencode.Schema):
159 159 allow_extra_fields = True
160 160 filter_extra_fields = True
161 161
162 162 users_group_name = All(
163 163 v.UnicodeString(strip=True, min=1, not_empty=True),
164 164 v.ValidUserGroup(localizer, edit, old_data)
165 165 )
166 166 user_group_description = v.UnicodeString(strip=True, min=1,
167 167 not_empty=False)
168 168
169 169 users_group_active = v.StringBoolean(if_missing=False)
170 170
171 171 if edit:
172 172 # this is user group owner
173 173 user = All(
174 174 v.UnicodeString(not_empty=True),
175 175 v.ValidRepoUser(localizer, allow_disabled))
176 176 return _UserGroupForm
177 177
178 178
179 179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
180 180 can_create_in_root=False, allow_disabled=False):
181 181 _ = localizer
182 182 old_data = old_data or {}
183 183 available_groups = available_groups or []
184 184
185 185 class _RepoGroupForm(formencode.Schema):
186 186 allow_extra_fields = True
187 187 filter_extra_fields = False
188 188
189 189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
190 190 v.SlugifyName(localizer),)
191 191 group_description = v.UnicodeString(strip=True, min=1,
192 192 not_empty=False)
193 193 group_copy_permissions = v.StringBoolean(if_missing=False)
194 194
195 195 group_parent_id = v.OneOf(available_groups, hideList=False,
196 196 testValueList=True, not_empty=True)
197 197 enable_locking = v.StringBoolean(if_missing=False)
198 198 chained_validators = [
199 199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
200 200
201 201 if edit:
202 202 # this is repo group owner
203 203 user = All(
204 204 v.UnicodeString(not_empty=True),
205 205 v.ValidRepoUser(localizer, allow_disabled))
206 206 return _RepoGroupForm
207 207
208 208
209 209 def RegisterForm(localizer, edit=False, old_data=None):
210 210 _ = localizer
211 211 old_data = old_data or {}
212 212
213 213 class _RegisterForm(formencode.Schema):
214 214 allow_extra_fields = True
215 215 filter_extra_fields = True
216 216 username = All(
217 217 v.ValidUsername(localizer, edit, old_data),
218 218 v.UnicodeString(strip=True, min=1, not_empty=True)
219 219 )
220 220 password = All(
221 221 v.ValidPassword(localizer),
222 222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
223 223 )
224 224 password_confirmation = All(
225 225 v.ValidPassword(localizer),
226 226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
227 227 )
228 228 active = v.StringBoolean(if_missing=False)
229 229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
232 232
233 233 chained_validators = [v.ValidPasswordsMatch(localizer)]
234 234 return _RegisterForm
235 235
236 236
237 237 def PasswordResetForm(localizer):
238 238 _ = localizer
239 239
240 240 class _PasswordResetForm(formencode.Schema):
241 241 allow_extra_fields = True
242 242 filter_extra_fields = True
243 243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
244 244 return _PasswordResetForm
245 245
246 246
247 247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None,
248 248 landing_revs=None, allow_disabled=False):
249 249 _ = localizer
250 250 old_data = old_data or {}
251 251 repo_groups = repo_groups or []
252 252 landing_revs = landing_revs or []
253 253 supported_backends = BACKENDS.keys()
254 254
255 255 class _RepoForm(formencode.Schema):
256 256 allow_extra_fields = True
257 257 filter_extra_fields = False
258 258 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
259 259 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
260 260 repo_group = All(v.CanWriteGroup(localizer, old_data),
261 261 v.OneOf(repo_groups, hideList=True))
262 262 repo_type = v.OneOf(supported_backends, required=False,
263 263 if_missing=old_data.get('repo_type'))
264 264 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
265 265 repo_private = v.StringBoolean(if_missing=False)
266 266 repo_landing_rev = v.OneOf(landing_revs, hideList=True)
267 267 repo_copy_permissions = v.StringBoolean(if_missing=False)
268 268 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
269 269
270 270 repo_enable_statistics = v.StringBoolean(if_missing=False)
271 271 repo_enable_downloads = v.StringBoolean(if_missing=False)
272 272 repo_enable_locking = v.StringBoolean(if_missing=False)
273 273
274 274 if edit:
275 275 # this is repo owner
276 276 user = All(
277 277 v.UnicodeString(not_empty=True),
278 278 v.ValidRepoUser(localizer, allow_disabled))
279 279 clone_uri_change = v.UnicodeString(
280 280 not_empty=False, if_missing=v.Missing)
281 281
282 282 chained_validators = [v.ValidCloneUri(localizer),
283 283 v.ValidRepoName(localizer, edit, old_data)]
284 284 return _RepoForm
285 285
286 286
287 287 def RepoPermsForm(localizer):
288 288 _ = localizer
289 289
290 290 class _RepoPermsForm(formencode.Schema):
291 291 allow_extra_fields = True
292 292 filter_extra_fields = False
293 293 chained_validators = [v.ValidPerms(localizer, type_='repo')]
294 294 return _RepoPermsForm
295 295
296 296
297 297 def RepoGroupPermsForm(localizer, valid_recursive_choices):
298 298 _ = localizer
299 299
300 300 class _RepoGroupPermsForm(formencode.Schema):
301 301 allow_extra_fields = True
302 302 filter_extra_fields = False
303 303 recursive = v.OneOf(valid_recursive_choices)
304 304 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
305 305 return _RepoGroupPermsForm
306 306
307 307
308 308 def UserGroupPermsForm(localizer):
309 309 _ = localizer
310 310
311 311 class _UserPermsForm(formencode.Schema):
312 312 allow_extra_fields = True
313 313 filter_extra_fields = False
314 314 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
315 315 return _UserPermsForm
316 316
317 317
318 318 def RepoFieldForm(localizer):
319 319 _ = localizer
320 320
321 321 class _RepoFieldForm(formencode.Schema):
322 322 filter_extra_fields = True
323 323 allow_extra_fields = True
324 324
325 325 new_field_key = All(v.FieldKey(localizer),
326 326 v.UnicodeString(strip=True, min=3, not_empty=True))
327 327 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
328 328 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
329 329 if_missing='str')
330 330 new_field_label = v.UnicodeString(not_empty=False)
331 331 new_field_desc = v.UnicodeString(not_empty=False)
332 332 return _RepoFieldForm
333 333
334 334
335 335 def RepoForkForm(localizer, edit=False, old_data=None,
336 336 supported_backends=BACKENDS.keys(), repo_groups=None,
337 337 landing_revs=None):
338 338 _ = localizer
339 339 old_data = old_data or {}
340 340 repo_groups = repo_groups or []
341 341 landing_revs = landing_revs or []
342 342
343 343 class _RepoForkForm(formencode.Schema):
344 344 allow_extra_fields = True
345 345 filter_extra_fields = False
346 346 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
347 347 v.SlugifyName(localizer))
348 348 repo_group = All(v.CanWriteGroup(localizer, ),
349 349 v.OneOf(repo_groups, hideList=True))
350 350 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
351 351 description = v.UnicodeString(strip=True, min=1, not_empty=True)
352 352 private = v.StringBoolean(if_missing=False)
353 353 copy_permissions = v.StringBoolean(if_missing=False)
354 354 fork_parent_id = v.UnicodeString()
355 355 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
356 356 landing_rev = v.OneOf(landing_revs, hideList=True)
357 357 return _RepoForkForm
358 358
359 359
360 360 def ApplicationSettingsForm(localizer):
361 361 _ = localizer
362 362
363 363 class _ApplicationSettingsForm(formencode.Schema):
364 364 allow_extra_fields = True
365 365 filter_extra_fields = False
366 366 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
367 367 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
368 368 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
369 369 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
370 370 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
371 371 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
372 372 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
373 373 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
374 374 return _ApplicationSettingsForm
375 375
376 376
377 377 def ApplicationVisualisationForm(localizer):
378 378 _ = localizer
379 379
380 380 class _ApplicationVisualisationForm(formencode.Schema):
381 381 allow_extra_fields = True
382 382 filter_extra_fields = False
383 383 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
384 384 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
385 385 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
386 386
387 387 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
388 388 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
389 389 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
390 390 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
391 391 rhodecode_show_version = v.StringBoolean(if_missing=False)
392 392 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
393 393 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
394 394 rhodecode_gravatar_url = v.UnicodeString(min=3)
395 395 rhodecode_clone_uri_tmpl = v.UnicodeString(min=3)
396 396 rhodecode_support_url = v.UnicodeString()
397 397 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
398 398 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
399 399 return _ApplicationVisualisationForm
400 400
401 401
402 402 class _BaseVcsSettingsForm(formencode.Schema):
403 403
404 404 allow_extra_fields = True
405 405 filter_extra_fields = False
406 406 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
407 407 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
408 408 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
409 409
410 410 # PR/Code-review
411 411 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
412 412 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
413 413
414 414 # hg
415 415 extensions_largefiles = v.StringBoolean(if_missing=False)
416 416 extensions_evolve = v.StringBoolean(if_missing=False)
417 417 phases_publish = v.StringBoolean(if_missing=False)
418 418
419 419 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
420 420 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
421 421
422 422 # git
423 423 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
424 424 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
425 425 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
426 426
427 427 # svn
428 428 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
429 429 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
430 430
431 431
432 432 def ApplicationUiSettingsForm(localizer):
433 433 _ = localizer
434 434
435 435 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
436 436 web_push_ssl = v.StringBoolean(if_missing=False)
437 437 paths_root_path = All(
438 438 v.ValidPath(localizer),
439 439 v.UnicodeString(strip=True, min=1, not_empty=True)
440 440 )
441 441 largefiles_usercache = All(
442 442 v.ValidPath(localizer),
443 443 v.UnicodeString(strip=True, min=2, not_empty=True))
444 444 vcs_git_lfs_store_location = All(
445 445 v.ValidPath(localizer),
446 446 v.UnicodeString(strip=True, min=2, not_empty=True))
447 447 extensions_hgsubversion = v.StringBoolean(if_missing=False)
448 448 extensions_hggit = v.StringBoolean(if_missing=False)
449 449 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
450 450 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
451 451 return _ApplicationUiSettingsForm
452 452
453 453
454 454 def RepoVcsSettingsForm(localizer, repo_name):
455 455 _ = localizer
456 456
457 457 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
458 458 inherit_global_settings = v.StringBoolean(if_missing=False)
459 459 new_svn_branch = v.ValidSvnPattern(localizer,
460 460 section='vcs_svn_branch', repo_name=repo_name)
461 461 new_svn_tag = v.ValidSvnPattern(localizer,
462 462 section='vcs_svn_tag', repo_name=repo_name)
463 463 return _RepoVcsSettingsForm
464 464
465 465
466 466 def LabsSettingsForm(localizer):
467 467 _ = localizer
468 468
469 469 class _LabSettingsForm(formencode.Schema):
470 470 allow_extra_fields = True
471 471 filter_extra_fields = False
472 472 return _LabSettingsForm
473 473
474 474
475 475 def ApplicationPermissionsForm(
476 476 localizer, register_choices, password_reset_choices,
477 477 extern_activate_choices):
478 478 _ = localizer
479 479
480 480 class _DefaultPermissionsForm(formencode.Schema):
481 481 allow_extra_fields = True
482 482 filter_extra_fields = True
483 483
484 484 anonymous = v.StringBoolean(if_missing=False)
485 485 default_register = v.OneOf(register_choices)
486 486 default_register_message = v.UnicodeString()
487 487 default_password_reset = v.OneOf(password_reset_choices)
488 488 default_extern_activate = v.OneOf(extern_activate_choices)
489 489 return _DefaultPermissionsForm
490 490
491 491
492 492 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
493 493 user_group_perms_choices):
494 494 _ = localizer
495 495
496 496 class _ObjectPermissionsForm(formencode.Schema):
497 497 allow_extra_fields = True
498 498 filter_extra_fields = True
499 499 overwrite_default_repo = v.StringBoolean(if_missing=False)
500 500 overwrite_default_group = v.StringBoolean(if_missing=False)
501 501 overwrite_default_user_group = v.StringBoolean(if_missing=False)
502 502 default_repo_perm = v.OneOf(repo_perms_choices)
503 503 default_group_perm = v.OneOf(group_perms_choices)
504 504 default_user_group_perm = v.OneOf(user_group_perms_choices)
505 505 return _ObjectPermissionsForm
506 506
507 507
508 508 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
509 509 repo_group_create_choices, user_group_create_choices,
510 510 fork_choices, inherit_default_permissions_choices):
511 511 _ = localizer
512 512
513 513 class _DefaultPermissionsForm(formencode.Schema):
514 514 allow_extra_fields = True
515 515 filter_extra_fields = True
516 516
517 517 anonymous = v.StringBoolean(if_missing=False)
518 518
519 519 default_repo_create = v.OneOf(create_choices)
520 520 default_repo_create_on_write = v.OneOf(create_on_write_choices)
521 521 default_user_group_create = v.OneOf(user_group_create_choices)
522 522 default_repo_group_create = v.OneOf(repo_group_create_choices)
523 523 default_fork_create = v.OneOf(fork_choices)
524 524 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
525 525 return _DefaultPermissionsForm
526 526
527 527
528 528 def UserIndividualPermissionsForm(localizer):
529 529 _ = localizer
530 530
531 531 class _DefaultPermissionsForm(formencode.Schema):
532 532 allow_extra_fields = True
533 533 filter_extra_fields = True
534 534
535 535 inherit_default_permissions = v.StringBoolean(if_missing=False)
536 536 return _DefaultPermissionsForm
537 537
538 538
539 539 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
540 540 _ = localizer
541 541 old_data = old_data or {}
542 542
543 543 class _DefaultsForm(formencode.Schema):
544 544 allow_extra_fields = True
545 545 filter_extra_fields = True
546 546 default_repo_type = v.OneOf(supported_backends)
547 547 default_repo_private = v.StringBoolean(if_missing=False)
548 548 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
549 549 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
550 550 default_repo_enable_locking = v.StringBoolean(if_missing=False)
551 551 return _DefaultsForm
552 552
553 553
554 554 def AuthSettingsForm(localizer):
555 555 _ = localizer
556 556
557 557 class _AuthSettingsForm(formencode.Schema):
558 558 allow_extra_fields = True
559 559 filter_extra_fields = True
560 560 auth_plugins = All(v.ValidAuthPlugins(localizer),
561 561 v.UniqueListFromString(localizer)(not_empty=True))
562 562 return _AuthSettingsForm
563 563
564 564
565 565 def UserExtraEmailForm(localizer):
566 566 _ = localizer
567 567
568 568 class _UserExtraEmailForm(formencode.Schema):
569 569 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
570 570 return _UserExtraEmailForm
571 571
572 572
573 573 def UserExtraIpForm(localizer):
574 574 _ = localizer
575 575
576 576 class _UserExtraIpForm(formencode.Schema):
577 577 ip = v.ValidIp(localizer)(not_empty=True)
578 578 return _UserExtraIpForm
579 579
580 580
581 581 def PullRequestForm(localizer, repo_id):
582 582 _ = localizer
583 583
584 584 class ReviewerForm(formencode.Schema):
585 585 user_id = v.Int(not_empty=True)
586 586 reasons = All()
587 rules = All(v.UniqueList(localizer, convert=int)())
587 588 mandatory = v.StringBoolean()
588 589
589 590 class _PullRequestForm(formencode.Schema):
590 591 allow_extra_fields = True
591 592 filter_extra_fields = True
592 593
593 594 common_ancestor = v.UnicodeString(strip=True, required=True)
594 595 source_repo = v.UnicodeString(strip=True, required=True)
595 596 source_ref = v.UnicodeString(strip=True, required=True)
596 597 target_repo = v.UnicodeString(strip=True, required=True)
597 598 target_ref = v.UnicodeString(strip=True, required=True)
598 599 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
599 600 v.UniqueList(localizer)(not_empty=True))
600 601 review_members = formencode.ForEach(ReviewerForm())
601 602 pullrequest_title = v.UnicodeString(strip=True, required=True, min=3, max=255)
602 603 pullrequest_desc = v.UnicodeString(strip=True, required=False)
603 604
604 605 return _PullRequestForm
605 606
606 607
607 608 def IssueTrackerPatternsForm(localizer):
608 609 _ = localizer
609 610
610 611 class _IssueTrackerPatternsForm(formencode.Schema):
611 612 allow_extra_fields = True
612 613 filter_extra_fields = False
613 614 chained_validators = [v.ValidPattern(localizer)]
614 615 return _IssueTrackerPatternsForm
@@ -1,1654 +1,1681 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid.threadlocal import get_current_request
34 34
35 35 from rhodecode import events
36 36 from rhodecode.translation import lazy_ugettext#, _
37 37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 38 from rhodecode.lib import audit_logger
39 39 from rhodecode.lib.compat import OrderedDict
40 40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 41 from rhodecode.lib.markup_renderer import (
42 42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 44 from rhodecode.lib.vcs.backends.base import (
45 45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 47 from rhodecode.lib.vcs.exceptions import (
48 48 CommitDoesNotExistError, EmptyRepositoryError)
49 49 from rhodecode.model import BaseModel
50 50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 51 from rhodecode.model.comment import CommentsModel
52 52 from rhodecode.model.db import (
53 53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 PullRequestVersion, ChangesetComment, Repository)
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 55 from rhodecode.model.meta import Session
56 56 from rhodecode.model.notification import NotificationModel, \
57 57 EmailNotificationModel
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64
65 65 # Data structure to hold the response data when updating commits during a pull
66 66 # request update.
67 67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 68 'executed', 'reason', 'new', 'old', 'changes',
69 69 'source_changed', 'target_changed'])
70 70
71 71
72 72 class PullRequestModel(BaseModel):
73 73
74 74 cls = PullRequest
75 75
76 76 DIFF_CONTEXT = 3
77 77
78 78 MERGE_STATUS_MESSAGES = {
79 79 MergeFailureReason.NONE: lazy_ugettext(
80 80 'This pull request can be automatically merged.'),
81 81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 82 'This pull request cannot be merged because of an unhandled'
83 83 ' exception.'),
84 84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 85 'This pull request cannot be merged because of merge conflicts.'),
86 86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 87 'This pull request could not be merged because push to target'
88 88 ' failed.'),
89 89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 90 'This pull request cannot be merged because the target is not a'
91 91 ' head.'),
92 92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 93 'This pull request cannot be merged because the source contains'
94 94 ' more branches than the target.'),
95 95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 96 'This pull request cannot be merged because the target has'
97 97 ' multiple heads.'),
98 98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 99 'This pull request cannot be merged because the target repository'
100 100 ' is locked.'),
101 101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 102 'This pull request cannot be merged because the target or the '
103 103 'source reference is missing.'),
104 104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 105 'This pull request cannot be merged because the target '
106 106 'reference is missing.'),
107 107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 108 'This pull request cannot be merged because the source '
109 109 'reference is missing.'),
110 110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 111 'This pull request cannot be merged because of conflicts related '
112 112 'to sub repositories.'),
113 113 }
114 114
115 115 UPDATE_STATUS_MESSAGES = {
116 116 UpdateFailureReason.NONE: lazy_ugettext(
117 117 'Pull request update successful.'),
118 118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 119 'Pull request update failed because of an unknown error.'),
120 120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 121 'No update needed because the source and target have not changed.'),
122 122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 123 'Pull request cannot be updated because the reference type is '
124 124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 126 'This pull request cannot be updated because the target '
127 127 'reference is missing.'),
128 128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 129 'This pull request cannot be updated because the source '
130 130 'reference is missing.'),
131 131 }
132 132
133 133 def __get_pull_request(self, pull_request):
134 134 return self._get_instance((
135 135 PullRequest, PullRequestVersion), pull_request)
136 136
137 137 def _check_perms(self, perms, pull_request, user, api=False):
138 138 if not api:
139 139 return h.HasRepoPermissionAny(*perms)(
140 140 user=user, repo_name=pull_request.target_repo.repo_name)
141 141 else:
142 142 return h.HasRepoPermissionAnyApi(*perms)(
143 143 user=user, repo_name=pull_request.target_repo.repo_name)
144 144
145 145 def check_user_read(self, pull_request, user, api=False):
146 146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 147 return self._check_perms(_perms, pull_request, user, api)
148 148
149 149 def check_user_merge(self, pull_request, user, api=False):
150 150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 151 return self._check_perms(_perms, pull_request, user, api)
152 152
153 153 def check_user_update(self, pull_request, user, api=False):
154 154 owner = user.user_id == pull_request.user_id
155 155 return self.check_user_merge(pull_request, user, api) or owner
156 156
157 157 def check_user_delete(self, pull_request, user):
158 158 owner = user.user_id == pull_request.user_id
159 159 _perms = ('repository.admin',)
160 160 return self._check_perms(_perms, pull_request, user) or owner
161 161
162 162 def check_user_change_status(self, pull_request, user, api=False):
163 163 reviewer = user.user_id in [x.user_id for x in
164 164 pull_request.reviewers]
165 165 return self.check_user_update(pull_request, user, api) or reviewer
166 166
167 167 def check_user_comment(self, pull_request, user):
168 168 owner = user.user_id == pull_request.user_id
169 169 return self.check_user_read(pull_request, user) or owner
170 170
171 171 def get(self, pull_request):
172 172 return self.__get_pull_request(pull_request)
173 173
174 174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 175 opened_by=None, order_by=None,
176 176 order_dir='desc'):
177 177 repo = None
178 178 if repo_name:
179 179 repo = self._get_repo(repo_name)
180 180
181 181 q = PullRequest.query()
182 182
183 183 # source or target
184 184 if repo and source:
185 185 q = q.filter(PullRequest.source_repo == repo)
186 186 elif repo:
187 187 q = q.filter(PullRequest.target_repo == repo)
188 188
189 189 # closed,opened
190 190 if statuses:
191 191 q = q.filter(PullRequest.status.in_(statuses))
192 192
193 193 # opened by filter
194 194 if opened_by:
195 195 q = q.filter(PullRequest.user_id.in_(opened_by))
196 196
197 197 if order_by:
198 198 order_map = {
199 199 'name_raw': PullRequest.pull_request_id,
200 200 'title': PullRequest.title,
201 201 'updated_on_raw': PullRequest.updated_on,
202 202 'target_repo': PullRequest.target_repo_id
203 203 }
204 204 if order_dir == 'asc':
205 205 q = q.order_by(order_map[order_by].asc())
206 206 else:
207 207 q = q.order_by(order_map[order_by].desc())
208 208
209 209 return q
210 210
211 211 def count_all(self, repo_name, source=False, statuses=None,
212 212 opened_by=None):
213 213 """
214 214 Count the number of pull requests for a specific repository.
215 215
216 216 :param repo_name: target or source repo
217 217 :param source: boolean flag to specify if repo_name refers to source
218 218 :param statuses: list of pull request statuses
219 219 :param opened_by: author user of the pull request
220 220 :returns: int number of pull requests
221 221 """
222 222 q = self._prepare_get_all_query(
223 223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224 224
225 225 return q.count()
226 226
227 227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 228 offset=0, length=None, order_by=None, order_dir='desc'):
229 229 """
230 230 Get all pull requests for a specific repository.
231 231
232 232 :param repo_name: target or source repo
233 233 :param source: boolean flag to specify if repo_name refers to source
234 234 :param statuses: list of pull request statuses
235 235 :param opened_by: author user of the pull request
236 236 :param offset: pagination offset
237 237 :param length: length of returned list
238 238 :param order_by: order of the returned list
239 239 :param order_dir: 'asc' or 'desc' ordering direction
240 240 :returns: list of pull requests
241 241 """
242 242 q = self._prepare_get_all_query(
243 243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 244 order_by=order_by, order_dir=order_dir)
245 245
246 246 if length:
247 247 pull_requests = q.limit(length).offset(offset).all()
248 248 else:
249 249 pull_requests = q.all()
250 250
251 251 return pull_requests
252 252
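A small illustrative call of the query helper above (the repository name is made up, and the status constants are assumed to exist on the PullRequest model):

    open_prs = PullRequestModel().get_all(
        'acme/backend',  # hypothetical repo name
        statuses=[PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN],
        order_by='updated_on_raw', order_dir='desc',
        offset=0, length=20)
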
253 253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 254 opened_by=None):
255 255 """
256 256 Count the number of pull requests for a specific repository that are
257 257 awaiting review.
258 258
259 259 :param repo_name: target or source repo
260 260 :param source: boolean flag to specify if repo_name refers to source
261 261 :param statuses: list of pull request statuses
262 262 :param opened_by: author user of the pull request
263 263 :returns: int number of pull requests
264 264 """
265 265 pull_requests = self.get_awaiting_review(
266 266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267 267
268 268 return len(pull_requests)
269 269
270 270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 271 opened_by=None, offset=0, length=None,
272 272 order_by=None, order_dir='desc'):
273 273 """
274 274 Get all pull requests for a specific repository that are awaiting
275 275 review.
276 276
277 277 :param repo_name: target or source repo
278 278 :param source: boolean flag to specify if repo_name refers to source
279 279 :param statuses: list of pull request statuses
280 280 :param opened_by: author user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _filtered_pull_requests = []
292 292 for pr in pull_requests:
293 293 status = pr.calculated_review_status()
294 294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 296 _filtered_pull_requests.append(pr)
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
302 302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 303 opened_by=None, user_id=None):
304 304 """
305 305 Count the number of pull requests for a specific repository that are
306 306 awaiting review from a specific user.
307 307
308 308 :param repo_name: target or source repo
309 309 :param source: boolean flag to specify if repo_name refers to source
310 310 :param statuses: list of pull request statuses
311 311 :param opened_by: author user of the pull request
312 312 :param user_id: reviewer user of the pull request
313 313 :returns: int number of pull requests
314 314 """
315 315 pull_requests = self.get_awaiting_my_review(
316 316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 317 user_id=user_id)
318 318
319 319 return len(pull_requests)
320 320
321 321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 322 opened_by=None, user_id=None, offset=0,
323 323 length=None, order_by=None, order_dir='desc'):
324 324 """
325 325 Get all pull requests for a specific repository that are awaiting
326 326 review from a specific user.
327 327
328 328 :param repo_name: target or source repo
329 329 :param source: boolean flag to specify if repo_name refers to source
330 330 :param statuses: list of pull request statuses
331 331 :param opened_by: author user of the pull request
332 332 :param user_id: reviewer user of the pull request
333 333 :param offset: pagination offset
334 334 :param length: length of returned list
335 335 :param order_by: order of the returned list
336 336 :param order_dir: 'asc' or 'desc' ordering direction
337 337 :returns: list of pull requests
338 338 """
339 339 pull_requests = self.get_all(
340 340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 341 order_by=order_by, order_dir=order_dir)
342 342
343 343 _my = PullRequestModel().get_not_reviewed(user_id)
344 344 my_participation = []
345 345 for pr in pull_requests:
346 346 if pr in _my:
347 347 my_participation.append(pr)
348 348 _filtered_pull_requests = my_participation
349 349 if length:
350 350 return _filtered_pull_requests[offset:offset+length]
351 351 else:
352 352 return _filtered_pull_requests
353 353
354 354 def get_not_reviewed(self, user_id):
355 355 return [
356 356 x.pull_request for x in PullRequestReviewers.query().filter(
357 357 PullRequestReviewers.user_id == user_id).all()
358 358 ]
359 359
360 360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 361 order_by=None, order_dir='desc'):
362 362 q = PullRequest.query()
363 363 if user_id:
364 364 reviewers_subquery = Session().query(
365 365 PullRequestReviewers.pull_request_id).filter(
366 366 PullRequestReviewers.user_id == user_id).subquery()
367 367 user_filter = or_(
368 368 PullRequest.user_id == user_id,
369 369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 370 )
371 371 q = PullRequest.query().filter(user_filter)
372 372
373 373 # closed,opened
374 374 if statuses:
375 375 q = q.filter(PullRequest.status.in_(statuses))
376 376
377 377 if order_by:
378 378 order_map = {
379 379 'name_raw': PullRequest.pull_request_id,
380 380 'title': PullRequest.title,
381 381 'updated_on_raw': PullRequest.updated_on,
382 382 'target_repo': PullRequest.target_repo_id
383 383 }
384 384 if order_dir == 'asc':
385 385 q = q.order_by(order_map[order_by].asc())
386 386 else:
387 387 q = q.order_by(order_map[order_by].desc())
388 388
389 389 return q
390 390
391 391 def count_im_participating_in(self, user_id=None, statuses=None):
392 392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 393 return q.count()
394 394
395 395 def get_im_participating_in(
396 396 self, user_id=None, statuses=None, offset=0,
397 397 length=None, order_by=None, order_dir='desc'):
398 398 """
399 399 Get all pull requests that I'm participating in, or that I have opened
400 400 """
401 401
402 402 q = self._prepare_participating_query(
403 403 user_id, statuses=statuses, order_by=order_by,
404 404 order_dir=order_dir)
405 405
406 406 if length:
407 407 pull_requests = q.limit(length).offset(offset).all()
408 408 else:
409 409 pull_requests = q.all()
410 410
411 411 return pull_requests
412 412
413 413 def get_versions(self, pull_request):
414 414 """
415 415 returns versions of the pull request sorted by ID ascending
416 416 """
417 417 return PullRequestVersion.query()\
418 418 .filter(PullRequestVersion.pull_request == pull_request)\
419 419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 420 .all()
421 421
422 422 def get_pr_version(self, pull_request_id, version=None):
423 423 at_version = None
424 424
425 425 if version and version == 'latest':
426 426 pull_request_ver = PullRequest.get(pull_request_id)
427 427 pull_request_obj = pull_request_ver
428 428 _org_pull_request_obj = pull_request_obj
429 429 at_version = 'latest'
430 430 elif version:
431 431 pull_request_ver = PullRequestVersion.get_or_404(version)
432 432 pull_request_obj = pull_request_ver
433 433 _org_pull_request_obj = pull_request_ver.pull_request
434 434 at_version = pull_request_ver.pull_request_version_id
435 435 else:
436 436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
437 437 pull_request_id)
438 438
439 439 pull_request_display_obj = PullRequest.get_pr_display_object(
440 440 pull_request_obj, _org_pull_request_obj)
441 441
442 442 return _org_pull_request_obj, pull_request_obj, \
443 443 pull_request_display_obj, at_version
444 444
445 445 def create(self, created_by, source_repo, source_ref, target_repo,
446 446 target_ref, revisions, reviewers, title, description=None,
447 447 reviewer_data=None, translator=None):
448 448 translator = translator or get_current_request().translate
449 449
450 450 created_by_user = self._get_user(created_by)
451 451 source_repo = self._get_repo(source_repo)
452 452 target_repo = self._get_repo(target_repo)
453 453
454 454 pull_request = PullRequest()
455 455 pull_request.source_repo = source_repo
456 456 pull_request.source_ref = source_ref
457 457 pull_request.target_repo = target_repo
458 458 pull_request.target_ref = target_ref
459 459 pull_request.revisions = revisions
460 460 pull_request.title = title
461 461 pull_request.description = description
462 462 pull_request.author = created_by_user
463 463 pull_request.reviewer_data = reviewer_data
464 464
465 465 Session().add(pull_request)
466 466 Session().flush()
467 467
468 468 reviewer_ids = set()
469 469 # members / reviewers
470 470 for reviewer_object in reviewers:
471 user_id, reasons, mandatory = reviewer_object
471 user_id, reasons, mandatory, rules = reviewer_object
472 472 user = self._get_user(user_id)
473 473
474 474 # skip duplicates
475 475 if user.user_id in reviewer_ids:
476 476 continue
477 477
478 478 reviewer_ids.add(user.user_id)
479 479
480 480 reviewer = PullRequestReviewers()
481 481 reviewer.user = user
482 482 reviewer.pull_request = pull_request
483 483 reviewer.reasons = reasons
484 484 reviewer.mandatory = mandatory
485
486 # NOTE(marcink): pick only first rule for now
487 rule_id = rules[0] if rules else None
488 rule = RepoReviewRule.get(rule_id) if rule_id else None
489 if rule:
490 review_group = rule.user_group_vote_rule()
491 if review_group:
492 # NOTE(marcink):
493 # again, the user can be a member of more groups,
494 # but we pick the first one, same as the default reviewers algorithm
495 review_group = review_group[0]
496
497 rule_data = {
498 'rule_name':
499 rule.review_rule_name,
500 'rule_user_group_entry_id':
501 review_group.repo_review_rule_users_group_id,
502 'rule_user_group_name':
503 review_group.users_group.users_group_name,
504 'rule_user_group_members':
505 [x.user.username for x in review_group.users_group.members],
506 }
507 # e.g. {'vote_rule': -1, 'mandatory': True}
508 rule_data.update(review_group.rule_data())
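# illustrative final rule_data (names and values assumed):
# {'rule_name': 'default-review-rule',
#  'rule_user_group_entry_id': 3,
#  'rule_user_group_name': 'qa-team',
#  'rule_user_group_members': ['alice', 'bob'],
#  'vote_rule': -1, 'mandatory': True}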
509
510 reviewer.rule_data = rule_data
511
485 512 Session().add(reviewer)
486 513
487 514 # Set approval status to "Under Review" for all commits which are
488 515 # part of this pull request.
489 516 ChangesetStatusModel().set_status(
490 517 repo=target_repo,
491 518 status=ChangesetStatus.STATUS_UNDER_REVIEW,
492 519 user=created_by_user,
493 520 pull_request=pull_request
494 521 )
495 522
496 523 MergeCheck.validate(
497 524 pull_request, user=created_by_user, translator=translator)
498 525
499 526 self.notify_reviewers(pull_request, reviewer_ids)
500 527 self._trigger_pull_request_hook(
501 528 pull_request, created_by_user, 'create')
502 529
503 530 creation_data = pull_request.get_api_data(with_merge_state=False)
504 531 self._log_audit_action(
505 532 'repo.pull_request.create', {'data': creation_data},
506 533 created_by_user, pull_request)
507 534
508 535 return pull_request
509 536
510 537 def _trigger_pull_request_hook(self, pull_request, user, action):
511 538 pull_request = self.__get_pull_request(pull_request)
512 539 target_scm = pull_request.target_repo.scm_instance()
513 540 if action == 'create':
514 541 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
515 542 elif action == 'merge':
516 543 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
517 544 elif action == 'close':
518 545 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
519 546 elif action == 'review_status_change':
520 547 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
521 548 elif action == 'update':
522 549 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
523 550 else:
524 551 return
525 552
526 553 trigger_hook(
527 554 username=user.username,
528 555 repo_name=pull_request.target_repo.repo_name,
529 556 repo_alias=target_scm.alias,
530 557 pull_request=pull_request)
531 558
532 559 def _get_commit_ids(self, pull_request):
533 560 """
534 561 Return the commit ids of the merged pull request.
535 562
536 563 This method does not yet deal correctly with the lack of auto-updates
537 564 nor with implicit target updates.
538 565 For example: if a commit in the source repo is already in the target, it
539 566 will still be reported.
540 567 """
541 568 merge_rev = pull_request.merge_rev
542 569 if merge_rev is None:
543 570 raise ValueError('This pull request was not merged yet')
544 571
545 572 commit_ids = list(pull_request.revisions)
546 573 if merge_rev not in commit_ids:
547 574 commit_ids.append(merge_rev)
548 575
549 576 return commit_ids
550 577
551 578 def merge(self, pull_request, user, extras):
552 579 log.debug("Merging pull request %s", pull_request.pull_request_id)
553 580 merge_state = self._merge_pull_request(pull_request, user, extras)
554 581 if merge_state.executed:
555 582 log.debug(
556 583 "Merge was successful, updating the pull request comments.")
557 584 self._comment_and_close_pr(pull_request, user, merge_state)
558 585
559 586 self._log_audit_action(
560 587 'repo.pull_request.merge',
561 588 {'merge_state': merge_state.__dict__},
562 589 user, pull_request)
563 590
564 591 else:
565 592 log.warn("Merge failed, not updating the pull request.")
566 593 return merge_state
567 594
568 595 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
569 596 target_vcs = pull_request.target_repo.scm_instance()
570 597 source_vcs = pull_request.source_repo.scm_instance()
571 598 target_ref = self._refresh_reference(
572 599 pull_request.target_ref_parts, target_vcs)
573 600
574 601 message = merge_msg or (
575 602 'Merge pull request #%(pr_id)s from '
576 603 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
577 604 'pr_id': pull_request.pull_request_id,
578 605 'source_repo': source_vcs.name,
579 606 'source_ref_name': pull_request.source_ref_parts.name,
580 607 'pr_title': pull_request.title
581 608 }
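# illustrative rendered message (values assumed):
# 'Merge pull request #12 from my-fork feature-x\n\n Fix login redirect'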
582 609
583 610 workspace_id = self._workspace_id(pull_request)
584 611 use_rebase = self._use_rebase_for_merging(pull_request)
585 612 close_branch = self._close_branch_before_merging(pull_request)
586 613
587 614 callback_daemon, extras = prepare_callback_daemon(
588 615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
589 616 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
590 617
591 618 with callback_daemon:
592 619 # TODO: johbo: Implement a clean way to run a config_override
593 620 # for a single call.
594 621 target_vcs.config.set(
595 622 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
596 623 merge_state = target_vcs.merge(
597 624 target_ref, source_vcs, pull_request.source_ref_parts,
598 625 workspace_id, user_name=user.username,
599 626 user_email=user.email, message=message, use_rebase=use_rebase,
600 627 close_branch=close_branch)
601 628 return merge_state
602 629
603 630 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
604 631 pull_request.merge_rev = merge_state.merge_ref.commit_id
605 632 pull_request.updated_on = datetime.datetime.now()
606 633 close_msg = close_msg or 'Pull request merged and closed'
607 634
608 635 CommentsModel().create(
609 636 text=safe_unicode(close_msg),
610 637 repo=pull_request.target_repo.repo_id,
611 638 user=user.user_id,
612 639 pull_request=pull_request.pull_request_id,
613 640 f_path=None,
614 641 line_no=None,
615 642 closing_pr=True
616 643 )
617 644
618 645 Session().add(pull_request)
619 646 Session().flush()
620 647 # TODO: paris: replace invalidation with less radical solution
621 648 ScmModel().mark_for_invalidation(
622 649 pull_request.target_repo.repo_name)
623 650 self._trigger_pull_request_hook(pull_request, user, 'merge')
624 651
625 652 def has_valid_update_type(self, pull_request):
626 653 source_ref_type = pull_request.source_ref_parts.type
627 654 return source_ref_type in ['book', 'branch', 'tag']
628 655
629 656 def update_commits(self, pull_request):
630 657 """
631 658 Get the updated list of commits for the pull request
632 659 and return the new pull request version and the list
633 660 of commits processed by this update action
634 661 """
635 662 pull_request = self.__get_pull_request(pull_request)
636 663 source_ref_type = pull_request.source_ref_parts.type
637 664 source_ref_name = pull_request.source_ref_parts.name
638 665 source_ref_id = pull_request.source_ref_parts.commit_id
639 666
640 667 target_ref_type = pull_request.target_ref_parts.type
641 668 target_ref_name = pull_request.target_ref_parts.name
642 669 target_ref_id = pull_request.target_ref_parts.commit_id
643 670
644 671 if not self.has_valid_update_type(pull_request):
645 672 log.debug(
646 673 "Skipping update of pull request %s due to ref type: %s",
647 674 pull_request, source_ref_type)
648 675 return UpdateResponse(
649 676 executed=False,
650 677 reason=UpdateFailureReason.WRONG_REF_TYPE,
651 678 old=pull_request, new=None, changes=None,
652 679 source_changed=False, target_changed=False)
653 680
654 681 # source repo
655 682 source_repo = pull_request.source_repo.scm_instance()
656 683 try:
657 684 source_commit = source_repo.get_commit(commit_id=source_ref_name)
658 685 except CommitDoesNotExistError:
659 686 return UpdateResponse(
660 687 executed=False,
661 688 reason=UpdateFailureReason.MISSING_SOURCE_REF,
662 689 old=pull_request, new=None, changes=None,
663 690 source_changed=False, target_changed=False)
664 691
665 692 source_changed = source_ref_id != source_commit.raw_id
666 693
667 694 # target repo
668 695 target_repo = pull_request.target_repo.scm_instance()
669 696 try:
670 697 target_commit = target_repo.get_commit(commit_id=target_ref_name)
671 698 except CommitDoesNotExistError:
672 699 return UpdateResponse(
673 700 executed=False,
674 701 reason=UpdateFailureReason.MISSING_TARGET_REF,
675 702 old=pull_request, new=None, changes=None,
676 703 source_changed=False, target_changed=False)
677 704 target_changed = target_ref_id != target_commit.raw_id
678 705
679 706 if not (source_changed or target_changed):
680 707 log.debug("Nothing changed in pull request %s", pull_request)
681 708 return UpdateResponse(
682 709 executed=False,
683 710 reason=UpdateFailureReason.NO_CHANGE,
684 711 old=pull_request, new=None, changes=None,
685 712 source_changed=source_changed, target_changed=target_changed)
686 713
687 714 change_in_found = 'target repo' if target_changed else 'source repo'
688 715 log.debug('Updating pull request because of change in %s detected',
689 716 change_in_found)
690 717
691 718 # Finally there is a need for an update, in case of source change
692 719 # we create a new version, else just an update
693 720 if source_changed:
694 721 pull_request_version = self._create_version_from_snapshot(pull_request)
695 722 self._link_comments_to_version(pull_request_version)
696 723 else:
697 724 try:
698 725 ver = pull_request.versions[-1]
699 726 except IndexError:
700 727 ver = None
701 728
702 729 pull_request.pull_request_version_id = \
703 730 ver.pull_request_version_id if ver else None
704 731 pull_request_version = pull_request
705 732
706 733 try:
707 734 if target_ref_type in ('tag', 'branch', 'book'):
708 735 target_commit = target_repo.get_commit(target_ref_name)
709 736 else:
710 737 target_commit = target_repo.get_commit(target_ref_id)
711 738 except CommitDoesNotExistError:
712 739 return UpdateResponse(
713 740 executed=False,
714 741 reason=UpdateFailureReason.MISSING_TARGET_REF,
715 742 old=pull_request, new=None, changes=None,
716 743 source_changed=source_changed, target_changed=target_changed)
717 744
718 745 # re-compute commit ids
719 746 old_commit_ids = pull_request.revisions
720 747 pre_load = ["author", "branch", "date", "message"]
721 748 commit_ranges = target_repo.compare(
722 749 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
723 750 pre_load=pre_load)
724 751
725 752 ancestor = target_repo.get_common_ancestor(
726 753 target_commit.raw_id, source_commit.raw_id, source_repo)
727 754
728 755 pull_request.source_ref = '%s:%s:%s' % (
729 756 source_ref_type, source_ref_name, source_commit.raw_id)
730 757 pull_request.target_ref = '%s:%s:%s' % (
731 758 target_ref_type, target_ref_name, ancestor)
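# refs are stored as 'type:name:commit_id',
# e.g. 'branch:default:<40-char sha>' (illustrative)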
732 759
733 760 pull_request.revisions = [
734 761 commit.raw_id for commit in reversed(commit_ranges)]
735 762 pull_request.updated_on = datetime.datetime.now()
736 763 Session().add(pull_request)
737 764 new_commit_ids = pull_request.revisions
738 765
739 766 old_diff_data, new_diff_data = self._generate_update_diffs(
740 767 pull_request, pull_request_version)
741 768
742 769 # calculate commit and file changes
743 770 changes = self._calculate_commit_id_changes(
744 771 old_commit_ids, new_commit_ids)
745 772 file_changes = self._calculate_file_changes(
746 773 old_diff_data, new_diff_data)
747 774
748 775 # set comments as outdated if DIFFS changed
749 776 CommentsModel().outdate_comments(
750 777 pull_request, old_diff_data=old_diff_data,
751 778 new_diff_data=new_diff_data)
752 779
753 780 commit_changes = (changes.added or changes.removed)
754 781 file_node_changes = (
755 782 file_changes.added or file_changes.modified or file_changes.removed)
756 783 pr_has_changes = commit_changes or file_node_changes
757 784
758 785 # Add an automatic comment to the pull request, in case
759 786 # anything has changed
760 787 if pr_has_changes:
761 788 update_comment = CommentsModel().create(
762 789 text=self._render_update_message(changes, file_changes),
763 790 repo=pull_request.target_repo,
764 791 user=pull_request.author,
765 792 pull_request=pull_request,
766 793 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
767 794
768 795 # Update status to "Under Review" for added commits
769 796 for commit_id in changes.added:
770 797 ChangesetStatusModel().set_status(
771 798 repo=pull_request.source_repo,
772 799 status=ChangesetStatus.STATUS_UNDER_REVIEW,
773 800 comment=update_comment,
774 801 user=pull_request.author,
775 802 pull_request=pull_request,
776 803 revision=commit_id)
777 804
778 805 log.debug(
779 806 'Updated pull request %s, added_ids: %s, common_ids: %s, '
780 807 'removed_ids: %s', pull_request.pull_request_id,
781 808 changes.added, changes.common, changes.removed)
782 809 log.debug(
783 810 'Updated pull request with the following file changes: %s',
784 811 file_changes)
785 812
786 813 log.info(
787 814 "Updated pull request %s from commit %s to commit %s, "
788 815 "stored new version %s of this pull request.",
789 816 pull_request.pull_request_id, source_ref_id,
790 817 pull_request.source_ref_parts.commit_id,
791 818 pull_request_version.pull_request_version_id)
792 819 Session().commit()
793 820 self._trigger_pull_request_hook(
794 821 pull_request, pull_request.author, 'update')
795 822
796 823 return UpdateResponse(
797 824 executed=True, reason=UpdateFailureReason.NONE,
798 825 old=pull_request, new=pull_request_version, changes=changes,
799 826 source_changed=source_changed, target_changed=target_changed)
800 827
801 828 def _create_version_from_snapshot(self, pull_request):
802 829 version = PullRequestVersion()
803 830 version.title = pull_request.title
804 831 version.description = pull_request.description
805 832 version.status = pull_request.status
806 833 version.created_on = datetime.datetime.now()
807 834 version.updated_on = pull_request.updated_on
808 835 version.user_id = pull_request.user_id
809 836 version.source_repo = pull_request.source_repo
810 837 version.source_ref = pull_request.source_ref
811 838 version.target_repo = pull_request.target_repo
812 839 version.target_ref = pull_request.target_ref
813 840
814 841 version._last_merge_source_rev = pull_request._last_merge_source_rev
815 842 version._last_merge_target_rev = pull_request._last_merge_target_rev
816 843 version.last_merge_status = pull_request.last_merge_status
817 844 version.shadow_merge_ref = pull_request.shadow_merge_ref
818 845 version.merge_rev = pull_request.merge_rev
819 846 version.reviewer_data = pull_request.reviewer_data
820 847
821 848 version.revisions = pull_request.revisions
822 849 version.pull_request = pull_request
823 850 Session().add(version)
824 851 Session().flush()
825 852
826 853 return version
827 854
828 855 def _generate_update_diffs(self, pull_request, pull_request_version):
829 856
830 857 diff_context = (
831 858 self.DIFF_CONTEXT +
832 859 CommentsModel.needed_extra_diff_context())
833 860
834 861 source_repo = pull_request_version.source_repo
835 862 source_ref_id = pull_request_version.source_ref_parts.commit_id
836 863 target_ref_id = pull_request_version.target_ref_parts.commit_id
837 864 old_diff = self._get_diff_from_pr_or_version(
838 865 source_repo, source_ref_id, target_ref_id, context=diff_context)
839 866
840 867 source_repo = pull_request.source_repo
841 868 source_ref_id = pull_request.source_ref_parts.commit_id
842 869 target_ref_id = pull_request.target_ref_parts.commit_id
843 870
844 871 new_diff = self._get_diff_from_pr_or_version(
845 872 source_repo, source_ref_id, target_ref_id, context=diff_context)
846 873
847 874 old_diff_data = diffs.DiffProcessor(old_diff)
848 875 old_diff_data.prepare()
849 876 new_diff_data = diffs.DiffProcessor(new_diff)
850 877 new_diff_data.prepare()
851 878
852 879 return old_diff_data, new_diff_data
853 880
854 881 def _link_comments_to_version(self, pull_request_version):
855 882 """
856 883 Link all unlinked comments of this pull request to the given version.
857 884
858 885 :param pull_request_version: The `PullRequestVersion` to which
859 886 the comments shall be linked.
860 887
861 888 """
862 889 pull_request = pull_request_version.pull_request
863 890 comments = ChangesetComment.query()\
864 891 .filter(
865 892 # TODO: johbo: Should we query for the repo at all here?
866 893 # Pending decision on how comments of PRs are to be related
867 894 # to either the source repo, the target repo or no repo at all.
868 895 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
869 896 ChangesetComment.pull_request == pull_request,
870 897 ChangesetComment.pull_request_version == None)\
871 898 .order_by(ChangesetComment.comment_id.asc())
872 899
873 900 # TODO: johbo: Find out why this breaks if it is done in a bulk
874 901 # operation.
875 902 for comment in comments:
876 903 comment.pull_request_version_id = (
877 904 pull_request_version.pull_request_version_id)
878 905 Session().add(comment)
879 906
880 907 def _calculate_commit_id_changes(self, old_ids, new_ids):
881 908 added = [x for x in new_ids if x not in old_ids]
882 909 common = [x for x in new_ids if x in old_ids]
883 910 removed = [x for x in old_ids if x not in new_ids]
884 911 total = new_ids
885 912 return ChangeTuple(added, common, removed, total)
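# illustrative: old_ids=['a', 'b'], new_ids=['b', 'c'] gives
# ChangeTuple(added=['c'], common=['b'], removed=['a'], total=['b', 'c'])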
886 913
887 914 def _calculate_file_changes(self, old_diff_data, new_diff_data):
888 915
889 916 old_files = OrderedDict()
890 917 for diff_data in old_diff_data.parsed_diff:
891 918 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
892 919
893 920 added_files = []
894 921 modified_files = []
895 922 removed_files = []
896 923 for diff_data in new_diff_data.parsed_diff:
897 924 new_filename = diff_data['filename']
898 925 new_hash = md5_safe(diff_data['raw_diff'])
899 926
900 927 old_hash = old_files.get(new_filename)
901 928 if not old_hash:
902 929 # file is not present in old diff, means it's added
903 930 added_files.append(new_filename)
904 931 else:
905 932 if new_hash != old_hash:
906 933 modified_files.append(new_filename)
907 934 # now remove a file from old, since we have seen it already
908 935 del old_files[new_filename]
909 936
910 937 # removed files are those present in old, but not in new;
911 938 # since we delete old files that appear in the new diff, any
912 939 # left-overs are the removed files
913 940 removed_files.extend(old_files.keys())
914 941
915 942 return FileChangeTuple(added_files, modified_files, removed_files)
916 943
917 944 def _render_update_message(self, changes, file_changes):
918 945 """
919 946 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
920 947 so it always looks the same regardless of which default
921 948 renderer the system is using.
922 949
923 950 :param changes: changes named tuple
924 951 :param file_changes: file changes named tuple
925 952
926 953 """
927 954 new_status = ChangesetStatus.get_status_lbl(
928 955 ChangesetStatus.STATUS_UNDER_REVIEW)
929 956
930 957 changed_files = (
931 958 file_changes.added + file_changes.modified + file_changes.removed)
932 959
933 960 params = {
934 961 'under_review_label': new_status,
935 962 'added_commits': changes.added,
936 963 'removed_commits': changes.removed,
937 964 'changed_files': changed_files,
938 965 'added_files': file_changes.added,
939 966 'modified_files': file_changes.modified,
940 967 'removed_files': file_changes.removed,
941 968 }
942 969 renderer = RstTemplateRenderer()
943 970 return renderer.render('pull_request_update.mako', **params)
944 971
945 972 def edit(self, pull_request, title, description, user):
946 973 pull_request = self.__get_pull_request(pull_request)
947 974 old_data = pull_request.get_api_data(with_merge_state=False)
948 975 if pull_request.is_closed():
949 976 raise ValueError('This pull request is closed')
950 977 if title:
951 978 pull_request.title = title
952 979 pull_request.description = description
953 980 pull_request.updated_on = datetime.datetime.now()
954 981 Session().add(pull_request)
955 982 self._log_audit_action(
956 983 'repo.pull_request.edit', {'old_data': old_data},
957 984 user, pull_request)
958 985
959 986 def update_reviewers(self, pull_request, reviewer_data, user):
960 987 """
961 988 Update the reviewers in the pull request
962 989
963 990 :param pull_request: the pr to update
964 991 :param reviewer_data: list of tuples
965 [(user, ['reason1', 'reason2'], mandatory_flag)]
992 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
966 993 """
967 994 pull_request = self.__get_pull_request(pull_request)
968 995 if pull_request.is_closed():
969 996 raise ValueError('This pull request is closed')
970 997
971 998 reviewers = {}
972 for user_id, reasons, mandatory in reviewer_data:
999 for user_id, reasons, mandatory, rules in reviewer_data:
973 1000 if isinstance(user_id, (int, basestring)):
974 1001 user_id = self._get_user(user_id).user_id
975 1002 reviewers[user_id] = {
976 1003 'reasons': reasons, 'mandatory': mandatory}
977 1004
978 1005 reviewers_ids = set(reviewers.keys())
979 1006 current_reviewers = PullRequestReviewers.query()\
980 1007 .filter(PullRequestReviewers.pull_request ==
981 1008 pull_request).all()
982 1009 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
983 1010
984 1011 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
985 1012 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
986 1013
987 1014 log.debug("Adding %s reviewers", ids_to_add)
988 1015 log.debug("Removing %s reviewers", ids_to_remove)
989 1016 changed = False
990 1017 for uid in ids_to_add:
991 1018 changed = True
992 1019 _usr = self._get_user(uid)
993 1020 reviewer = PullRequestReviewers()
994 1021 reviewer.user = _usr
995 1022 reviewer.pull_request = pull_request
996 1023 reviewer.reasons = reviewers[uid]['reasons']
997 1024 # NOTE(marcink): mandatory shouldn't be changed now
998 1025 # reviewer.mandatory = reviewers[uid]['mandatory']
999 1026 Session().add(reviewer)
1000 1027 self._log_audit_action(
1001 1028 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1002 1029 user, pull_request)
1003 1030
1004 1031 for uid in ids_to_remove:
1005 1032 changed = True
1006 1033 reviewers = PullRequestReviewers.query()\
1007 1034 .filter(PullRequestReviewers.user_id == uid,
1008 1035 PullRequestReviewers.pull_request == pull_request)\
1009 1036 .all()
1010 1037 # use .all() in case we accidentally added the same person twice
1011 1038 # this CAN happen due to the lack of DB checks
1012 1039 for obj in reviewers:
1013 1040 old_data = obj.get_dict()
1014 1041 Session().delete(obj)
1015 1042 self._log_audit_action(
1016 1043 'repo.pull_request.reviewer.delete',
1017 1044 {'old_data': old_data}, user, pull_request)
1018 1045
1019 1046 if changed:
1020 1047 pull_request.updated_on = datetime.datetime.now()
1021 1048 Session().add(pull_request)
1022 1049
1023 1050 self.notify_reviewers(pull_request, ids_to_add)
1024 1051 return ids_to_add, ids_to_remove
1025 1052
1026 1053 def get_url(self, pull_request, request=None, permalink=False):
1027 1054 if not request:
1028 1055 request = get_current_request()
1029 1056
1030 1057 if permalink:
1031 1058 return request.route_url(
1032 1059 'pull_requests_global',
1033 1060 pull_request_id=pull_request.pull_request_id,)
1034 1061 else:
1035 1062 return request.route_url('pullrequest_show',
1036 1063 repo_name=safe_str(pull_request.target_repo.repo_name),
1037 1064 pull_request_id=pull_request.pull_request_id,)
1038 1065
1039 1066 def get_shadow_clone_url(self, pull_request):
1040 1067 """
1041 1068 Returns qualified url pointing to the shadow repository. If this pull
1042 1069 request is closed there is no shadow repository and ``None`` will be
1043 1070 returned.
1044 1071 """
1045 1072 if pull_request.is_closed():
1046 1073 return None
1047 1074 else:
1048 1075 pr_url = urllib.unquote(self.get_url(pull_request))
1049 1076 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1050 1077
1051 1078 def notify_reviewers(self, pull_request, reviewers_ids):
1052 1079 # notification to reviewers
1053 1080 if not reviewers_ids:
1054 1081 return
1055 1082
1056 1083 pull_request_obj = pull_request
1057 1084 # get the current participants of this pull request
1058 1085 recipients = reviewers_ids
1059 1086 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1060 1087
1061 1088 pr_source_repo = pull_request_obj.source_repo
1062 1089 pr_target_repo = pull_request_obj.target_repo
1063 1090
1064 1091 pr_url = h.route_url('pullrequest_show',
1065 1092 repo_name=pr_target_repo.repo_name,
1066 1093 pull_request_id=pull_request_obj.pull_request_id,)
1067 1094
1068 1095 # set some variables for email notification
1069 1096 pr_target_repo_url = h.route_url(
1070 1097 'repo_summary', repo_name=pr_target_repo.repo_name)
1071 1098
1072 1099 pr_source_repo_url = h.route_url(
1073 1100 'repo_summary', repo_name=pr_source_repo.repo_name)
1074 1101
1075 1102 # pull request specifics
1076 1103 pull_request_commits = [
1077 1104 (x.raw_id, x.message)
1078 1105 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1079 1106
1080 1107 kwargs = {
1081 1108 'user': pull_request.author,
1082 1109 'pull_request': pull_request_obj,
1083 1110 'pull_request_commits': pull_request_commits,
1084 1111
1085 1112 'pull_request_target_repo': pr_target_repo,
1086 1113 'pull_request_target_repo_url': pr_target_repo_url,
1087 1114
1088 1115 'pull_request_source_repo': pr_source_repo,
1089 1116 'pull_request_source_repo_url': pr_source_repo_url,
1090 1117
1091 1118 'pull_request_url': pr_url,
1092 1119 }
1093 1120
1094 1121 # pre-generate the subject for notification itself
1095 1122 (subject,
1096 1123 _h, _e, # we don't care about those
1097 1124 body_plaintext) = EmailNotificationModel().render_email(
1098 1125 notification_type, **kwargs)
1099 1126
1100 1127 # create notification objects, and emails
1101 1128 NotificationModel().create(
1102 1129 created_by=pull_request.author,
1103 1130 notification_subject=subject,
1104 1131 notification_body=body_plaintext,
1105 1132 notification_type=notification_type,
1106 1133 recipients=recipients,
1107 1134 email_kwargs=kwargs,
1108 1135 )
1109 1136
1110 1137 def delete(self, pull_request, user):
1111 1138 pull_request = self.__get_pull_request(pull_request)
1112 1139 old_data = pull_request.get_api_data(with_merge_state=False)
1113 1140 self._cleanup_merge_workspace(pull_request)
1114 1141 self._log_audit_action(
1115 1142 'repo.pull_request.delete', {'old_data': old_data},
1116 1143 user, pull_request)
1117 1144 Session().delete(pull_request)
1118 1145
1119 1146 def close_pull_request(self, pull_request, user):
1120 1147 pull_request = self.__get_pull_request(pull_request)
1121 1148 self._cleanup_merge_workspace(pull_request)
1122 1149 pull_request.status = PullRequest.STATUS_CLOSED
1123 1150 pull_request.updated_on = datetime.datetime.now()
1124 1151 Session().add(pull_request)
1125 1152 self._trigger_pull_request_hook(
1126 1153 pull_request, pull_request.author, 'close')
1127 1154
1128 1155 pr_data = pull_request.get_api_data(with_merge_state=False)
1129 1156 self._log_audit_action(
1130 1157 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1131 1158
1132 1159 def close_pull_request_with_comment(
1133 1160 self, pull_request, user, repo, message=None):
1134 1161
1135 1162 pull_request_review_status = pull_request.calculated_review_status()
1136 1163
1137 1164 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1138 1165 # approved only if we have voting consent
1139 1166 status = ChangesetStatus.STATUS_APPROVED
1140 1167 else:
1141 1168 status = ChangesetStatus.STATUS_REJECTED
1142 1169 status_lbl = ChangesetStatus.get_status_lbl(status)
1143 1170
1144 1171 default_message = (
1145 1172 'Closing with status change {transition_icon} {status}.'
1146 1173 ).format(transition_icon='>', status=status_lbl)
1147 1174 text = message or default_message
1148 1175
1149 1176 # create a comment, and link it to new status
1150 1177 comment = CommentsModel().create(
1151 1178 text=text,
1152 1179 repo=repo.repo_id,
1153 1180 user=user.user_id,
1154 1181 pull_request=pull_request.pull_request_id,
1155 1182 status_change=status_lbl,
1156 1183 status_change_type=status,
1157 1184 closing_pr=True
1158 1185 )
1159 1186
1160 1187 # calculate old status before we change it
1161 1188 old_calculated_status = pull_request.calculated_review_status()
1162 1189 ChangesetStatusModel().set_status(
1163 1190 repo.repo_id,
1164 1191 status,
1165 1192 user.user_id,
1166 1193 comment=comment,
1167 1194 pull_request=pull_request.pull_request_id
1168 1195 )
1169 1196
1170 1197 Session().flush()
1171 1198 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1172 1199 # we now calculate the status of the pull request again, and based on
1173 1200 # that calculation trigger a status change. This can happen when a
1174 1201 # non-reviewer admin closes a PR: their vote doesn't change the status,
1175 1202 # while if they are a reviewer it might.
1176 1203 calculated_status = pull_request.calculated_review_status()
1177 1204 if old_calculated_status != calculated_status:
1178 1205 self._trigger_pull_request_hook(
1179 1206 pull_request, user, 'review_status_change')
1180 1207
1181 1208 # finally close the PR
1182 1209 PullRequestModel().close_pull_request(
1183 1210 pull_request.pull_request_id, user)
1184 1211
1185 1212 return comment, status
1186 1213
1187 1214 def merge_status(self, pull_request, translator=None):
1188 1215 _ = translator or get_current_request().translate
1189 1216
1190 1217 if not self._is_merge_enabled(pull_request):
1191 1218 return False, _('Server-side pull request merging is disabled.')
1192 1219 if pull_request.is_closed():
1193 1220 return False, _('This pull request is closed.')
1194 1221 merge_possible, msg = self._check_repo_requirements(
1195 1222 target=pull_request.target_repo, source=pull_request.source_repo,
1196 1223 translator=_)
1197 1224 if not merge_possible:
1198 1225 return merge_possible, msg
1199 1226
1200 1227 try:
1201 1228 resp = self._try_merge(pull_request)
1202 1229 log.debug("Merge response: %s", resp)
1203 1230 status = resp.possible, self.merge_status_message(
1204 1231 resp.failure_reason)
1205 1232 except NotImplementedError:
1206 1233 status = False, _('Pull request merging is not supported.')
1207 1234
1208 1235 return status
1209 1236
1210 1237 def _check_repo_requirements(self, target, source, translator):
1211 1238 """
1212 1239 Check if `target` and `source` have compatible requirements.
1213 1240
1214 1241 Currently this is just checking for largefiles.
1215 1242 """
1216 1243 _ = translator
1217 1244 target_has_largefiles = self._has_largefiles(target)
1218 1245 source_has_largefiles = self._has_largefiles(source)
1219 1246 merge_possible = True
1220 1247 message = u''
1221 1248
1222 1249 if target_has_largefiles != source_has_largefiles:
1223 1250 merge_possible = False
1224 1251 if source_has_largefiles:
1225 1252 message = _(
1226 1253 'Target repository large files support is disabled.')
1227 1254 else:
1228 1255 message = _(
1229 1256 'Source repository large files support is disabled.')
1230 1257
1231 1258 return merge_possible, message
1232 1259
1233 1260 def _has_largefiles(self, repo):
1234 1261 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1235 1262 'extensions', 'largefiles')
1236 1263 return largefiles_ui and largefiles_ui[0].active
1237 1264
1238 1265 def _try_merge(self, pull_request):
1239 1266 """
1240 1267 Try to merge the pull request and return the merge status.
1241 1268 """
1242 1269 log.debug(
1243 1270 "Trying out if the pull request %s can be merged.",
1244 1271 pull_request.pull_request_id)
1245 1272 target_vcs = pull_request.target_repo.scm_instance()
1246 1273
1247 1274 # Refresh the target reference.
1248 1275 try:
1249 1276 target_ref = self._refresh_reference(
1250 1277 pull_request.target_ref_parts, target_vcs)
1251 1278 except CommitDoesNotExistError:
1252 1279 merge_state = MergeResponse(
1253 1280 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1254 1281 return merge_state
1255 1282
1256 1283 target_locked = pull_request.target_repo.locked
1257 1284 if target_locked and target_locked[0]:
1258 1285 log.debug("The target repository is locked.")
1259 1286 merge_state = MergeResponse(
1260 1287 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1261 1288 elif self._needs_merge_state_refresh(pull_request, target_ref):
1262 1289 log.debug("Refreshing the merge status of the repository.")
1263 1290 merge_state = self._refresh_merge_state(
1264 1291 pull_request, target_vcs, target_ref)
1265 1292 else:
1266 1293 possible = pull_request.\
1267 1294 last_merge_status == MergeFailureReason.NONE
1268 1295 merge_state = MergeResponse(
1269 1296 possible, False, None, pull_request.last_merge_status)
1270 1297
1271 1298 return merge_state
1272 1299
1273 1300 def _refresh_reference(self, reference, vcs_repository):
1274 1301 if reference.type in ('branch', 'book'):
1275 1302 name_or_id = reference.name
1276 1303 else:
1277 1304 name_or_id = reference.commit_id
1278 1305 refreshed_commit = vcs_repository.get_commit(name_or_id)
1279 1306 refreshed_reference = Reference(
1280 1307 reference.type, reference.name, refreshed_commit.raw_id)
1281 1308 return refreshed_reference
1282 1309
1283 1310 def _needs_merge_state_refresh(self, pull_request, target_reference):
1284 1311 return not(
1285 1312 pull_request.revisions and
1286 1313 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1287 1314 target_reference.commit_id == pull_request._last_merge_target_rev)
1288 1315
1289 1316 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1290 1317 workspace_id = self._workspace_id(pull_request)
1291 1318 source_vcs = pull_request.source_repo.scm_instance()
1292 1319 use_rebase = self._use_rebase_for_merging(pull_request)
1293 1320 close_branch = self._close_branch_before_merging(pull_request)
1294 1321 merge_state = target_vcs.merge(
1295 1322 target_reference, source_vcs, pull_request.source_ref_parts,
1296 1323 workspace_id, dry_run=True, use_rebase=use_rebase,
1297 1324 close_branch=close_branch)
1298 1325
1299 1326 # Do not store the response if there was an unknown error.
1300 1327 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1301 1328 pull_request._last_merge_source_rev = \
1302 1329 pull_request.source_ref_parts.commit_id
1303 1330 pull_request._last_merge_target_rev = target_reference.commit_id
1304 1331 pull_request.last_merge_status = merge_state.failure_reason
1305 1332 pull_request.shadow_merge_ref = merge_state.merge_ref
1306 1333 Session().add(pull_request)
1307 1334 Session().commit()
1308 1335
1309 1336 return merge_state
1310 1337
1311 1338 def _workspace_id(self, pull_request):
1312 1339 workspace_id = 'pr-%s' % pull_request.pull_request_id
1313 1340 return workspace_id
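# e.g. for pull request id 12 this yields the workspace id 'pr-12'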
1314 1341
1315 1342 def merge_status_message(self, status_code):
1316 1343 """
1317 1344 Return a human friendly error message for the given merge status code.
1318 1345 """
1319 1346 return self.MERGE_STATUS_MESSAGES[status_code]
1320 1347
1321 1348 def generate_repo_data(self, repo, commit_id=None, branch=None,
1322 1349 bookmark=None, translator=None):
1323 1350 from rhodecode.model.repo import RepoModel
1324 1351
1325 1352 all_refs, selected_ref = \
1326 1353 self._get_repo_pullrequest_sources(
1327 1354 repo.scm_instance(), commit_id=commit_id,
1328 1355 branch=branch, bookmark=bookmark, translator=translator)
1329 1356
1330 1357 refs_select2 = []
1331 1358 for element in all_refs:
1332 1359 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1333 1360 refs_select2.append({'text': element[1], 'children': children})
1334 1361
1335 1362 return {
1336 1363 'user': {
1337 1364 'user_id': repo.user.user_id,
1338 1365 'username': repo.user.username,
1339 1366 'firstname': repo.user.first_name,
1340 1367 'lastname': repo.user.last_name,
1341 1368 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1342 1369 },
1343 1370 'name': repo.repo_name,
1344 1371 'link': RepoModel().get_url(repo),
1345 1372 'description': h.chop_at_smart(repo.description_safe, '\n'),
1346 1373 'refs': {
1347 1374 'all_refs': all_refs,
1348 1375 'selected_ref': selected_ref,
1349 1376 'select2_refs': refs_select2
1350 1377 }
1351 1378 }
1352 1379
1353 1380 def generate_pullrequest_title(self, source, source_ref, target):
1354 1381 return u'{source}#{at_ref} to {target}'.format(
1355 1382 source=source,
1356 1383 at_ref=source_ref,
1357 1384 target=target,
1358 1385 )
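# illustrative: generate_pullrequest_title('my-repo', 'feature-x', 'upstream')
# returns u'my-repo#feature-x to upstream'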
1359 1386
1360 1387 def _cleanup_merge_workspace(self, pull_request):
1361 1388 # Merging related cleanup
1362 1389 target_scm = pull_request.target_repo.scm_instance()
1363 1390 workspace_id = 'pr-%s' % pull_request.pull_request_id
1364 1391
1365 1392 try:
1366 1393 target_scm.cleanup_merge_workspace(workspace_id)
1367 1394 except NotImplementedError:
1368 1395 pass
1369 1396
1370 1397 def _get_repo_pullrequest_sources(
1371 1398 self, repo, commit_id=None, branch=None, bookmark=None,
1372 1399 translator=None):
1373 1400 """
1374 1401 Return a structure with repo's interesting commits, suitable for
1375 1402 the selectors in pullrequest controller
1376 1403
1377 1404 :param commit_id: a commit that must be in the list somehow
1378 1405 and selected by default
1379 1406 :param branch: a branch that must be in the list and selected
1380 1407 by default - even if closed
1381 1408 :param bookmark: a bookmark that must be in the list and selected
1382 1409 """
1383 1410 _ = translator or get_current_request().translate
1384 1411
1385 1412 commit_id = safe_str(commit_id) if commit_id else None
1386 1413 branch = safe_str(branch) if branch else None
1387 1414 bookmark = safe_str(bookmark) if bookmark else None
1388 1415
1389 1416 selected = None
1390 1417
1391 1418 # order matters: first source that has commit_id in it will be selected
1392 1419 sources = []
1393 1420 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1394 1421 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1395 1422
1396 1423 if commit_id:
1397 1424 ref_commit = (h.short_id(commit_id), commit_id)
1398 1425 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1399 1426
1400 1427 sources.append(
1401 1428 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1402 1429 )
1403 1430
1404 1431 groups = []
1405 1432 for group_key, ref_list, group_name, match in sources:
1406 1433 group_refs = []
1407 1434 for ref_name, ref_id in ref_list:
1408 1435 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1409 1436 group_refs.append((ref_key, ref_name))
1410 1437
1411 1438 if not selected:
1412 1439 if set([commit_id, match]) & set([ref_id, ref_name]):
1413 1440 selected = ref_key
1414 1441
1415 1442 if group_refs:
1416 1443 groups.append((group_refs, group_name))
1417 1444
1418 1445 if not selected:
1419 1446 ref = commit_id or branch or bookmark
1420 1447 if ref:
1421 1448 raise CommitDoesNotExistError(
1422 1449 'No commit refs could be found matching: %s' % ref)
1423 1450 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1424 1451 selected = 'branch:%s:%s' % (
1425 1452 repo.DEFAULT_BRANCH_NAME,
1426 1453 repo.branches[repo.DEFAULT_BRANCH_NAME]
1427 1454 )
1428 1455 elif repo.commit_ids:
1429 1456 # make the user select in this case
1430 1457 selected = None
1431 1458 else:
1432 1459 raise EmptyRepositoryError()
1433 1460 return groups, selected
1434 1461
1435 1462 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1436 1463 return self._get_diff_from_pr_or_version(
1437 1464 source_repo, source_ref_id, target_ref_id, context=context)
1438 1465
1439 1466 def _get_diff_from_pr_or_version(
1440 1467 self, source_repo, source_ref_id, target_ref_id, context):
1441 1468 target_commit = source_repo.get_commit(
1442 1469 commit_id=safe_str(target_ref_id))
1443 1470 source_commit = source_repo.get_commit(
1444 1471 commit_id=safe_str(source_ref_id))
1445 1472 if isinstance(source_repo, Repository):
1446 1473 vcs_repo = source_repo.scm_instance()
1447 1474 else:
1448 1475 vcs_repo = source_repo
1449 1476
1450 1477 # TODO: johbo: In the context of an update, we cannot reach
1451 1478 # the old commit anymore with our normal mechanisms. It needs
1452 1479 # some sort of special support in the vcs layer to avoid this
1453 1480 # workaround.
1454 1481 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1455 1482 vcs_repo.alias == 'git'):
1456 1483 source_commit.raw_id = safe_str(source_ref_id)
1457 1484
1458 1485 log.debug('calculating diff between '
1459 1486 'source_ref:%s and target_ref:%s for repo `%s`',
1460 1487 target_ref_id, source_ref_id,
1461 1488 safe_unicode(vcs_repo.path))
1462 1489
1463 1490 vcs_diff = vcs_repo.get_diff(
1464 1491 commit1=target_commit, commit2=source_commit, context=context)
1465 1492 return vcs_diff
1466 1493
1467 1494 def _is_merge_enabled(self, pull_request):
1468 1495 return self._get_general_setting(
1469 1496 pull_request, 'rhodecode_pr_merge_enabled')
1470 1497
1471 1498 def _use_rebase_for_merging(self, pull_request):
1472 1499 repo_type = pull_request.target_repo.repo_type
1473 1500 if repo_type == 'hg':
1474 1501 return self._get_general_setting(
1475 1502 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1476 1503 elif repo_type == 'git':
1477 1504 return self._get_general_setting(
1478 1505 pull_request, 'rhodecode_git_use_rebase_for_merging')
1479 1506
1480 1507 return False
1481 1508
1482 1509 def _close_branch_before_merging(self, pull_request):
1483 1510 repo_type = pull_request.target_repo.repo_type
1484 1511 if repo_type == 'hg':
1485 1512 return self._get_general_setting(
1486 1513 pull_request, 'rhodecode_hg_close_branch_before_merging')
1487 1514 elif repo_type == 'git':
1488 1515 return self._get_general_setting(
1489 1516 pull_request, 'rhodecode_git_close_branch_before_merging')
1490 1517
1491 1518 return False
1492 1519
1493 1520 def _get_general_setting(self, pull_request, settings_key, default=False):
1494 1521 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1495 1522 settings = settings_model.get_general_settings()
1496 1523 return settings.get(settings_key, default)
1497 1524
1498 1525 def _log_audit_action(self, action, action_data, user, pull_request):
1499 1526 audit_logger.store(
1500 1527 action=action,
1501 1528 action_data=action_data,
1502 1529 user=user,
1503 1530 repo=pull_request.target_repo)
1504 1531
1505 1532 def get_reviewer_functions(self):
1506 1533 """
1507 1534 Fetches functions for validation and fetching default reviewers.
1508 1535 If available we use the EE package, else we fall back to the CE
1509 1536 package functions
1510 1537 """
1511 1538 try:
1512 1539 from rc_reviewers.utils import get_default_reviewers_data
1513 1540 from rc_reviewers.utils import validate_default_reviewers
1514 1541 except ImportError:
1515 1542 from rhodecode.apps.repository.utils import \
1516 1543 get_default_reviewers_data
1517 1544 from rhodecode.apps.repository.utils import \
1518 1545 validate_default_reviewers
1519 1546
1520 1547 return get_default_reviewers_data, validate_default_reviewers
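# illustrative usage:
# get_default_reviewers_data, validate_default_reviewers = \
#     PullRequestModel().get_reviewer_functions()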
1521 1548
1522 1549
1523 1550 class MergeCheck(object):
1524 1551 """
1525 1552 Performs merge checks and returns a check object which stores information
1526 1553 about merge errors and merge conditions
1527 1554 """
1528 1555 TODO_CHECK = 'todo'
1529 1556 PERM_CHECK = 'perm'
1530 1557 REVIEW_CHECK = 'review'
1531 1558 MERGE_CHECK = 'merge'
1532 1559
1533 1560 def __init__(self):
1534 1561 self.review_status = None
1535 1562 self.merge_possible = None
1536 1563 self.merge_msg = ''
1537 1564 self.failed = None
1538 1565 self.errors = []
1539 1566 self.error_details = OrderedDict()
1540 1567
1541 1568 def push_error(self, error_type, message, error_key, details):
1542 1569 self.failed = True
1543 1570 self.errors.append([error_type, message])
1544 1571 self.error_details[error_key] = dict(
1545 1572 details=details,
1546 1573 error_type=error_type,
1547 1574 message=message
1548 1575 )
1549 1576
1550 1577 @classmethod
1551 1578 def validate(cls, pull_request, user, translator, fail_early=False):
1552 1579 _ = translator
1553 1580 merge_check = cls()
1554 1581
1555 1582 # permissions to merge
1556 1583 user_allowed_to_merge = PullRequestModel().check_user_merge(
1557 1584 pull_request, user)
1558 1585 if not user_allowed_to_merge:
1559 1586 log.debug("MergeCheck: cannot merge, approval is pending.")
1560 1587
1561 1588 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1562 1589 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1563 1590 if fail_early:
1564 1591 return merge_check
1565 1592
1566 1593 # review status, must be always present
1567 1594 review_status = pull_request.calculated_review_status()
1568 1595 merge_check.review_status = review_status
1569 1596
1570 1597 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1571 1598 if not status_approved:
1572 1599 log.debug("MergeCheck: cannot merge, approval is pending.")
1573 1600
1574 1601 msg = _('Pull request reviewer approval is pending.')
1575 1602
1576 1603 merge_check.push_error(
1577 1604 'warning', msg, cls.REVIEW_CHECK, review_status)
1578 1605
1579 1606 if fail_early:
1580 1607 return merge_check
1581 1608
1582 1609 # left over TODOs
1583 1610 todos = CommentsModel().get_unresolved_todos(pull_request)
1584 1611 if todos:
1585 1612 log.debug("MergeCheck: cannot merge, {} "
1586 1613 "unresolved todos left.".format(len(todos)))
1587 1614
1588 1615 if len(todos) == 1:
1589 1616 msg = _('Cannot merge, {} TODO still not resolved.').format(
1590 1617 len(todos))
1591 1618 else:
1592 1619 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1593 1620 len(todos))
1594 1621
1595 1622 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1596 1623
1597 1624 if fail_early:
1598 1625 return merge_check
1599 1626
1600 1627 # merge possible
1601 1628 merge_status, msg = PullRequestModel().merge_status(
1602 1629 pull_request, translator=translator)
1603 1630 merge_check.merge_possible = merge_status
1604 1631 merge_check.merge_msg = msg
1605 1632 if not merge_status:
1606 1633 log.debug(
1607 1634 "MergeCheck: cannot merge, pull request merge not possible.")
1608 1635 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1609 1636
1610 1637 if fail_early:
1611 1638 return merge_check
1612 1639
1613 1640 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1614 1641 return merge_check
1615 1642
1616 1643 @classmethod
1617 1644 def get_merge_conditions(cls, pull_request, translator):
1618 1645 _ = translator
1619 1646 merge_details = {}
1620 1647
1621 1648 model = PullRequestModel()
1622 1649 use_rebase = model._use_rebase_for_merging(pull_request)
1623 1650
1624 1651 if use_rebase:
1625 1652 merge_details['merge_strategy'] = dict(
1626 1653 details={},
1627 1654 message=_('Merge strategy: rebase')
1628 1655 )
1629 1656 else:
1630 1657 merge_details['merge_strategy'] = dict(
1631 1658 details={},
1632 1659 message=_('Merge strategy: explicit merge commit')
1633 1660 )
1634 1661
1635 1662 close_branch = model._close_branch_before_merging(pull_request)
1636 1663 if close_branch:
1637 1664 repo_type = pull_request.target_repo.repo_type
1638 1665 if repo_type == 'hg':
1639 1666 close_msg = _('Source branch will be closed after merge.')
1640 1667 elif repo_type == 'git':
1641 1668 close_msg = _('Source branch will be deleted after merge.')
1642 1669
1643 1670 merge_details['close_branch'] = dict(
1644 1671 details={},
1645 1672 message=close_msg
1646 1673 )
1647 1674
1648 1675 return merge_details
1649 1676
1650 1677 ChangeTuple = collections.namedtuple(
1651 1678 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1652 1679
1653 1680 FileChangeTuple = collections.namedtuple(
1654 1681 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,913 +1,914 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 users model for RhodeCode
23 23 """
24 24
25 25 import logging
26 26 import traceback
27 27 import datetime
28 28 import ipaddress
29 29
30 30 from pyramid.threadlocal import get_current_request
31 31 from sqlalchemy.exc import DatabaseError
32 32
33 33 from rhodecode import events
34 34 from rhodecode.lib.user_log_filter import user_log_filter
35 35 from rhodecode.lib.utils2 import (
36 36 safe_unicode, get_current_rhodecode_user, action_logger_generic,
37 37 AttributeDict, str2bool)
38 38 from rhodecode.lib.exceptions import (
39 39 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
40 40 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
41 41 from rhodecode.lib.caching_query import FromCache
42 42 from rhodecode.model import BaseModel
43 43 from rhodecode.model.auth_token import AuthTokenModel
44 44 from rhodecode.model.db import (
45 45 _hash_key, true, false, or_, joinedload, User, UserToPerm,
46 46 UserEmailMap, UserIpMap, UserLog)
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.repo_group import RepoGroupModel
49 49
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class UserModel(BaseModel):
55 55 cls = User
56 56
57 57 def get(self, user_id, cache=False):
58 58 user = self.sa.query(User)
59 59 if cache:
60 60 user = user.options(
61 61 FromCache("sql_cache_short", "get_user_%s" % user_id))
62 62 return user.get(user_id)
63 63
64 64 def get_user(self, user):
65 65 return self._get_user(user)
66 66
67 67 def _serialize_user(self, user):
68 68 import rhodecode.lib.helpers as h
69 69
70 70 return {
71 71 'id': user.user_id,
72 72 'first_name': user.first_name,
73 73 'last_name': user.last_name,
74 74 'username': user.username,
75 75 'email': user.email,
76 76 'icon_link': h.gravatar_url(user.email, 30),
77 'profile_link': h.link_to_user(user),
77 78 'value_display': h.escape(h.person(user)),
78 79 'value': user.username,
79 80 'value_type': 'user',
80 81 'active': user.active,
81 82 }
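# illustrative serialized user (values assumed):
# {'id': 1, 'first_name': 'Jane', 'last_name': 'Doe', 'username': 'jdoe',
#  'email': 'jdoe@example.com', 'icon_link': '<gravatar url>',
#  'profile_link': '<user profile link>', 'value_display': 'Jane Doe (jdoe)',
#  'value': 'jdoe', 'value_type': 'user', 'active': True}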
82 83
83 84 def get_users(self, name_contains=None, limit=20, only_active=True):
84 85
85 86 query = self.sa.query(User)
86 87 if only_active:
87 88 query = query.filter(User.active == true())
88 89
89 90 if name_contains:
90 91 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
91 92 query = query.filter(
92 93 or_(
93 94 User.name.ilike(ilike_expression),
94 95 User.lastname.ilike(ilike_expression),
95 96 User.username.ilike(ilike_expression)
96 97 )
97 98 )
98 99 query = query.limit(limit)
99 100 users = query.all()
100 101
101 102 _users = [
102 103 self._serialize_user(user) for user in users
103 104 ]
104 105 return _users
105 106
106 107 def get_by_username(self, username, cache=False, case_insensitive=False):
107 108
108 109 if case_insensitive:
109 110 user = self.sa.query(User).filter(User.username.ilike(username))
110 111 else:
111 112 user = self.sa.query(User)\
112 113 .filter(User.username == username)
113 114 if cache:
114 115 name_key = _hash_key(username)
115 116 user = user.options(
116 117 FromCache("sql_cache_short", "get_user_%s" % name_key))
117 118 return user.scalar()
118 119
119 120 def get_by_email(self, email, cache=False, case_insensitive=False):
120 121 return User.get_by_email(email, case_insensitive, cache)
121 122
122 123 def get_by_auth_token(self, auth_token, cache=False):
123 124 return User.get_by_auth_token(auth_token, cache)
124 125
125 126 def get_active_user_count(self, cache=False):
126 127 qry = User.query().filter(
127 128 User.active == true()).filter(
128 129 User.username != User.DEFAULT_USER)
129 130 if cache:
130 131 qry = qry.options(
131 132 FromCache("sql_cache_short", "get_active_users"))
132 133 return qry.count()
133 134
134 135 def create(self, form_data, cur_user=None):
135 136 if not cur_user:
136 137 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
137 138
138 139 user_data = {
139 140 'username': form_data['username'],
140 141 'password': form_data['password'],
141 142 'email': form_data['email'],
142 143 'firstname': form_data['firstname'],
143 144 'lastname': form_data['lastname'],
144 145 'active': form_data['active'],
145 146 'extern_type': form_data['extern_type'],
146 147 'extern_name': form_data['extern_name'],
147 148 'admin': False,
148 149 'cur_user': cur_user
149 150 }
150 151
151 152 if 'create_repo_group' in form_data:
152 153 user_data['create_repo_group'] = str2bool(
153 154 form_data.get('create_repo_group'))
154 155
155 156 try:
156 157 if form_data.get('password_change'):
157 158 user_data['force_password_change'] = True
158 159 return UserModel().create_or_update(**user_data)
159 160 except Exception:
160 161 log.error(traceback.format_exc())
161 162 raise
162 163
163 164 def update_user(self, user, skip_attrs=None, **kwargs):
164 165 from rhodecode.lib.auth import get_crypt_password
165 166
166 167 user = self._get_user(user)
167 168 if user.username == User.DEFAULT_USER:
168 169 raise DefaultUserException(
169 170 "You can't edit this user (`%(username)s`) since it's "
170 171 "crucial for entire application" % {
171 172 'username': user.username})
172 173
173 174 # first store only defaults
174 175 user_attrs = {
175 176 'updating_user_id': user.user_id,
176 177 'username': user.username,
177 178 'password': user.password,
178 179 'email': user.email,
179 180 'firstname': user.name,
180 181 'lastname': user.lastname,
181 182 'active': user.active,
182 183 'admin': user.admin,
183 184 'extern_name': user.extern_name,
184 185 'extern_type': user.extern_type,
185 186 'language': user.user_data.get('language')
186 187 }
187 188
188 189 # in case there's new_password (it comes from the form), use it as
189 190 # the stored password
190 191 if kwargs.get('new_password'):
191 192 kwargs['password'] = kwargs['new_password']
192 193
193 194 # cleanups, my_account password change form
194 195 kwargs.pop('current_password', None)
195 196 kwargs.pop('new_password', None)
196 197
197 198 # cleanups, user edit password change form
198 199 kwargs.pop('password_confirmation', None)
199 200 kwargs.pop('password_change', None)
200 201
201 202 # create repo group on user creation
202 203 kwargs.pop('create_repo_group', None)
203 204
204 205 # legacy forms send name, which is the firstname
205 206 firstname = kwargs.pop('name', None)
206 207 if firstname:
207 208 kwargs['firstname'] = firstname
208 209
209 210 for k, v in kwargs.items():
210 211 # skip if we don't want to update this
211 212 if skip_attrs and k in skip_attrs:
212 213 continue
213 214
214 215 user_attrs[k] = v
215 216
216 217 try:
217 218 return self.create_or_update(**user_attrs)
218 219 except Exception:
219 220 log.error(traceback.format_exc())
220 221 raise
221 222
222 223 def create_or_update(
223 224 self, username, password, email, firstname='', lastname='',
224 225 active=True, admin=False, extern_type=None, extern_name=None,
225 226 cur_user=None, plugin=None, force_password_change=False,
226 227 allow_to_create_user=True, create_repo_group=None,
227 228 updating_user_id=None, language=None, strict_creation_check=True):
228 229 """
229 230 Creates a new instance if not found, or updates current one
230 231
231 232 :param username:
232 233 :param password:
233 234 :param email:
234 235 :param firstname:
235 236 :param lastname:
236 237 :param active:
237 238 :param admin:
238 239 :param extern_type:
239 240 :param extern_name:
240 241 :param cur_user:
241 242 :param plugin: optional plugin this method was called from
242 243 :param force_password_change: toggles new or existing user flag
243 244 for password change
244 245 :param allow_to_create_user: Defines if the method can actually create
245 246 new users
246 247         :param create_repo_group: Defines if the method should also
247 248             create a repo group named after the user, owned by that user
248 249         :param updating_user_id: if set, this is the user we want to
249 250             update; this allows editing the username.
250 251 :param language: language of user from interface.
251 252
252 253 :returns: new User object with injected `is_new_user` attribute.
253 254 """
254 255
255 256 if not cur_user:
256 257 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
257 258
258 259 from rhodecode.lib.auth import (
259 260 get_crypt_password, check_password, generate_auth_token)
260 261 from rhodecode.lib.hooks_base import (
261 262 log_create_user, check_allowed_create_user)
262 263
263 264 def _password_change(new_user, password):
264 265 old_password = new_user.password or ''
265 266 # empty password
266 267 if not old_password:
267 268 return False
268 269
269 270 # password check is only needed for RhodeCode internal auth calls
270 271 # in case it's a plugin we don't care
271 272 if not plugin:
272 273
273 274 # first check if we gave crypted password back, and if it
274 275 # matches it's not password change
275 276 if new_user.password == password:
276 277 return False
277 278
278 279 password_match = check_password(password, old_password)
279 280 if not password_match:
280 281 return True
281 282
282 283 return False
283 284
284 285 # read settings on default personal repo group creation
285 286 if create_repo_group is None:
286 287 default_create_repo_group = RepoGroupModel()\
287 288 .get_default_create_personal_repo_group()
288 289 create_repo_group = default_create_repo_group
289 290
290 291 user_data = {
291 292 'username': username,
292 293 'password': password,
293 294 'email': email,
294 295 'firstname': firstname,
295 296 'lastname': lastname,
296 297 'active': active,
297 298 'admin': admin
298 299 }
299 300
300 301 if updating_user_id:
301 302 log.debug('Checking for existing account in RhodeCode '
302 303 'database with user_id `%s` ' % (updating_user_id,))
303 304 user = User.get(updating_user_id)
304 305 else:
305 306 log.debug('Checking for existing account in RhodeCode '
306 307 'database with username `%s` ' % (username,))
307 308 user = User.get_by_username(username, case_insensitive=True)
308 309
309 310 if user is None:
310 311 # we check internal flag if this method is actually allowed to
311 312 # create new user
312 313 if not allow_to_create_user:
313 314 msg = ('Method wants to create new user, but it is not '
314 315 'allowed to do so')
315 316 log.warning(msg)
316 317 raise NotAllowedToCreateUserError(msg)
317 318
318 319 log.debug('Creating new user %s', username)
319 320
320 321 # only if we create user that is active
321 322 new_active_user = active
322 323 if new_active_user and strict_creation_check:
323 324 # raises UserCreationError if it's not allowed for any reason to
324 325 # create new active user, this also executes pre-create hooks
325 326 check_allowed_create_user(user_data, cur_user, strict_check=True)
326 327 events.trigger(events.UserPreCreate(user_data))
327 328 new_user = User()
328 329 edit = False
329 330 else:
330 331 log.debug('updating user %s', username)
331 332 events.trigger(events.UserPreUpdate(user, user_data))
332 333 new_user = user
333 334 edit = True
334 335
335 336 # we're not allowed to edit default user
336 337 if user.username == User.DEFAULT_USER:
337 338 raise DefaultUserException(
338 339 "You can't edit this user (`%(username)s`) since it's "
339 340                 "crucial for the entire application"
340 341 % {'username': user.username})
341 342
342 343 # inject special attribute that will tell us if User is new or old
343 344 new_user.is_new_user = not edit
344 345         # for users that didn't specify auth type, we use the RhodeCode built-in
345 346 from rhodecode.authentication.plugins import auth_rhodecode
346 347 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
347 348 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name
348 349
349 350 try:
350 351 new_user.username = username
351 352 new_user.admin = admin
352 353 new_user.email = email
353 354 new_user.active = active
354 355 new_user.extern_name = safe_unicode(extern_name)
355 356 new_user.extern_type = safe_unicode(extern_type)
356 357 new_user.name = firstname
357 358 new_user.lastname = lastname
358 359
359 360             # set password only if creating a user or the password has changed
360 361 if not edit or _password_change(new_user, password):
361 362 reason = 'new password' if edit else 'new user'
362 363 log.debug('Updating password reason=>%s', reason)
363 364 new_user.password = get_crypt_password(password) if password else None
364 365
365 366 if force_password_change:
366 367 new_user.update_userdata(force_password_change=True)
367 368 if language:
368 369 new_user.update_userdata(language=language)
369 370 new_user.update_userdata(notification_status=True)
370 371
371 372 self.sa.add(new_user)
372 373
373 374 if not edit and create_repo_group:
374 375 RepoGroupModel().create_personal_repo_group(
375 376 new_user, commit_early=False)
376 377
377 378 if not edit:
378 379 # add the RSS token
379 380 AuthTokenModel().create(username,
380 381 description=u'Generated feed token',
381 382 role=AuthTokenModel.cls.ROLE_FEED)
382 383 kwargs = new_user.get_dict()
383 384 # backward compat, require api_keys present
384 385 kwargs['api_keys'] = kwargs['auth_tokens']
385 386 log_create_user(created_by=cur_user, **kwargs)
386 387 events.trigger(events.UserPostCreate(user_data))
387 388 return new_user
388 389 except (DatabaseError,):
389 390 log.error(traceback.format_exc())
390 391 raise
391 392
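A minimal usage sketch of the create_or_update API shown above (not part of the commit), assuming the usual rhodecode.model.user / rhodecode.model.meta import paths; the username, e-mail and password values are placeholders:

from rhodecode.model.meta import Session
from rhodecode.model.user import UserModel

user_model = UserModel()

# no account named 'jdoe' exists yet, so a new one is created
user = user_model.create_or_update(
    username='jdoe', password='s3cret', email='jdoe@example.com',
    firstname='John', lastname='Doe', active=True)
assert user.is_new_user  # injected attribute distinguishes create from update
Session().commit()

# updating_user_id switches the lookup from username to user_id,
# which is what allows renaming an existing account
user_model.create_or_update(
    username='john.doe', password='s3cret', email='jdoe@example.com',
    updating_user_id=user.user_id)
Session().commit()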
392 393 def create_registration(self, form_data):
393 394 from rhodecode.model.notification import NotificationModel
394 395 from rhodecode.model.notification import EmailNotificationModel
395 396
396 397 try:
397 398 form_data['admin'] = False
398 399 form_data['extern_name'] = 'rhodecode'
399 400 form_data['extern_type'] = 'rhodecode'
400 401 new_user = self.create(form_data)
401 402
402 403 self.sa.add(new_user)
403 404 self.sa.flush()
404 405
405 406 user_data = new_user.get_dict()
406 407 kwargs = {
407 408                 # use a SQLAlchemy-safe dump of user data
408 409 'user': AttributeDict(user_data),
409 410 'date': datetime.datetime.now()
410 411 }
411 412 notification_type = EmailNotificationModel.TYPE_REGISTRATION
412 413 # pre-generate the subject for notification itself
413 414 (subject,
414 415 _h, _e, # we don't care about those
415 416 body_plaintext) = EmailNotificationModel().render_email(
416 417 notification_type, **kwargs)
417 418
418 419 # create notification objects, and emails
419 420 NotificationModel().create(
420 421 created_by=new_user,
421 422 notification_subject=subject,
422 423 notification_body=body_plaintext,
423 424 notification_type=notification_type,
424 425 recipients=None, # all admins
425 426 email_kwargs=kwargs,
426 427 )
427 428
428 429 return new_user
429 430 except Exception:
430 431 log.error(traceback.format_exc())
431 432 raise
432 433
433 434 def _handle_user_repos(self, username, repositories, handle_mode=None):
434 435 _superadmin = self.cls.get_first_super_admin()
435 436 left_overs = True
436 437
437 438 from rhodecode.model.repo import RepoModel
438 439
439 440 if handle_mode == 'detach':
440 441 for obj in repositories:
441 442 obj.user = _superadmin
442 443                 # set description so we know why the super admin now owns
443 444                 # additional repositories that were orphaned
444 445 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
445 446 self.sa.add(obj)
446 447 left_overs = False
447 448 elif handle_mode == 'delete':
448 449 for obj in repositories:
449 450 RepoModel().delete(obj, forks='detach')
450 451 left_overs = False
451 452
452 453         # if nothing was done we still have leftovers
453 454 return left_overs
454 455
455 456 def _handle_user_repo_groups(self, username, repository_groups,
456 457 handle_mode=None):
457 458 _superadmin = self.cls.get_first_super_admin()
458 459 left_overs = True
459 460
460 461 from rhodecode.model.repo_group import RepoGroupModel
461 462
462 463 if handle_mode == 'detach':
463 464 for r in repository_groups:
464 465 r.user = _superadmin
465 466                 # set description so we know why the super admin now owns
466 467                 # additional repository groups that were orphaned
467 468 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
468 469 self.sa.add(r)
469 470 left_overs = False
470 471 elif handle_mode == 'delete':
471 472 for r in repository_groups:
472 473 RepoGroupModel().delete(r)
473 474 left_overs = False
474 475
475 476         # if nothing was done we still have leftovers
476 477 return left_overs
477 478
478 479 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
479 480 _superadmin = self.cls.get_first_super_admin()
480 481 left_overs = True
481 482
482 483 from rhodecode.model.user_group import UserGroupModel
483 484
484 485 if handle_mode == 'detach':
485 486 for r in user_groups:
486 487 for user_user_group_to_perm in r.user_user_group_to_perm:
487 488 if user_user_group_to_perm.user.username == username:
488 489 user_user_group_to_perm.user = _superadmin
489 490 r.user = _superadmin
490 491                 # set description so we know why the super admin now owns
491 492                 # additional user groups that were orphaned
492 493 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
493 494 self.sa.add(r)
494 495 left_overs = False
495 496 elif handle_mode == 'delete':
496 497 for r in user_groups:
497 498 UserGroupModel().delete(r)
498 499 left_overs = False
499 500
500 501         # if nothing was done we still have leftovers
501 502 return left_overs
502 503
503 504 def delete(self, user, cur_user=None, handle_repos=None,
504 505 handle_repo_groups=None, handle_user_groups=None):
505 506 if not cur_user:
506 507 cur_user = getattr(
507 508 get_current_rhodecode_user(), 'username', None)
508 509 user = self._get_user(user)
509 510
510 511 try:
511 512 if user.username == User.DEFAULT_USER:
512 513 raise DefaultUserException(
513 514 u"You can't remove this user since it's"
514 515                     u" crucial for the entire application")
515 516
516 517 left_overs = self._handle_user_repos(
517 518 user.username, user.repositories, handle_repos)
518 519 if left_overs and user.repositories:
519 520 repos = [x.repo_name for x in user.repositories]
520 521 raise UserOwnsReposException(
521 522 u'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
522 523 u'removed. Switch owners or remove those repositories:%(list_repos)s'
523 524 % {'username': user.username, 'len_repos': len(repos),
524 525 'list_repos': ', '.join(repos)})
525 526
526 527 left_overs = self._handle_user_repo_groups(
527 528 user.username, user.repository_groups, handle_repo_groups)
528 529 if left_overs and user.repository_groups:
529 530 repo_groups = [x.group_name for x in user.repository_groups]
530 531 raise UserOwnsRepoGroupsException(
531 532 u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
532 533 u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
533 534 % {'username': user.username, 'len_repo_groups': len(repo_groups),
534 535 'list_repo_groups': ', '.join(repo_groups)})
535 536
536 537 left_overs = self._handle_user_user_groups(
537 538 user.username, user.user_groups, handle_user_groups)
538 539 if left_overs and user.user_groups:
539 540 user_groups = [x.users_group_name for x in user.user_groups]
540 541 raise UserOwnsUserGroupsException(
541 542 u'user "%s" still owns %s user groups and cannot be '
542 543 u'removed. Switch owners or remove those user groups:%s'
543 544 % (user.username, len(user_groups), ', '.join(user_groups)))
544 545
545 546 # we might change the user data with detach/delete, make sure
546 547 # the object is marked as expired before actually deleting !
547 548 self.sa.expire(user)
548 549 self.sa.delete(user)
549 550 from rhodecode.lib.hooks_base import log_delete_user
550 551 log_delete_user(deleted_by=cur_user, **user.get_dict())
551 552 except Exception:
552 553 log.error(traceback.format_exc())
553 554 raise
554 555
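A short sketch of driving the delete() flow above, again assuming the rhodecode.model import paths; 'jdoe' is a placeholder username:

from rhodecode.model.meta import Session
from rhodecode.model.user import UserModel

# 'detach' reassigns owned repositories / repo groups / user groups to the
# first super-admin; 'delete' would remove them instead, and leaving a mode
# unset raises the corresponding UserOwns*Exception when leftovers remain
UserModel().delete(
    'jdoe',
    handle_repos='detach',
    handle_repo_groups='detach',
    handle_user_groups='detach')
Session().commit()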
555 556 def reset_password_link(self, data, pwd_reset_url):
556 557 from rhodecode.lib.celerylib import tasks, run_task
557 558 from rhodecode.model.notification import EmailNotificationModel
558 559 user_email = data['email']
559 560 try:
560 561 user = User.get_by_email(user_email)
561 562 if user:
562 563 log.debug('password reset user found %s', user)
563 564
564 565 email_kwargs = {
565 566 'password_reset_url': pwd_reset_url,
566 567 'user': user,
567 568 'email': user_email,
568 569 'date': datetime.datetime.now()
569 570 }
570 571
571 572 (subject, headers, email_body,
572 573 email_body_plaintext) = EmailNotificationModel().render_email(
573 574 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
574 575
575 576 recipients = [user_email]
576 577
577 578 action_logger_generic(
578 579 'sending password reset email to user: {}'.format(
579 580 user), namespace='security.password_reset')
580 581
581 582 run_task(tasks.send_email, recipients, subject,
582 583 email_body_plaintext, email_body)
583 584
584 585 else:
585 586 log.debug("password reset email %s not found", user_email)
586 587 except Exception:
587 588 log.error(traceback.format_exc())
588 589 return False
589 590
590 591 return True
591 592
592 593 def reset_password(self, data):
593 594 from rhodecode.lib.celerylib import tasks, run_task
594 595 from rhodecode.model.notification import EmailNotificationModel
595 596 from rhodecode.lib import auth
596 597 user_email = data['email']
597 598 pre_db = True
598 599 try:
599 600 user = User.get_by_email(user_email)
600 601 new_passwd = auth.PasswordGenerator().gen_password(
601 602 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
602 603 if user:
603 604 user.password = auth.get_crypt_password(new_passwd)
604 605 # also force this user to reset his password !
605 606 user.update_userdata(force_password_change=True)
606 607
607 608 Session().add(user)
608 609
609 610 # now delete the token in question
610 611 UserApiKeys = AuthTokenModel.cls
611 612 UserApiKeys().query().filter(
612 613 UserApiKeys.api_key == data['token']).delete()
613 614
614 615 Session().commit()
615 616 log.info('successfully reset password for `%s`', user_email)
616 617
617 618 if new_passwd is None:
618 619 raise Exception('unable to generate new password')
619 620
620 621 pre_db = False
621 622
622 623 email_kwargs = {
623 624 'new_password': new_passwd,
624 625 'user': user,
625 626 'email': user_email,
626 627 'date': datetime.datetime.now()
627 628 }
628 629
629 630 (subject, headers, email_body,
630 631 email_body_plaintext) = EmailNotificationModel().render_email(
631 632 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
632 633 **email_kwargs)
633 634
634 635 recipients = [user_email]
635 636
636 637 action_logger_generic(
637 638 'sent new password to user: {} with email: {}'.format(
638 639 user, user_email), namespace='security.password_reset')
639 640
640 641 run_task(tasks.send_email, recipients, subject,
641 642 email_body_plaintext, email_body)
642 643
643 644 except Exception:
644 645 log.error('Failed to update user password')
645 646 log.error(traceback.format_exc())
646 647 if pre_db:
647 648             # we rollback only if local db stuff fails. If it goes into
648 649             # run_task, we're past the rollback state and it wouldn't work then
649 650 Session().rollback()
650 651
651 652 return True
652 653
653 654 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
654 655 """
655 656         Fetches auth_user by user_id, or api_key if present.
656 657         Fills auth_user attributes with those taken from the database.
657 658         Additionally sets is_authenticated to False when the lookup
658 659         raises, and returns False if the user is missing or inactive.
659 660
660 661 :param auth_user: instance of user to set attributes
661 662 :param user_id: user id to fetch by
662 663 :param api_key: api key to fetch by
663 664 :param username: username to fetch by
664 665 """
665 666 if user_id is None and api_key is None and username is None:
666 667 raise Exception('You need to pass user_id, api_key or username')
667 668
668 669 log.debug(
669 670 'AuthUser: fill data execution based on: '
670 671 'user_id:%s api_key:%s username:%s', user_id, api_key, username)
671 672 try:
672 673 dbuser = None
673 674 if user_id:
674 675 dbuser = self.get(user_id)
675 676 elif api_key:
676 677 dbuser = self.get_by_auth_token(api_key)
677 678 elif username:
678 679 dbuser = self.get_by_username(username)
679 680
680 681 if not dbuser:
681 682 log.warning(
682 683 'Unable to lookup user by id:%s api_key:%s username:%s',
683 684 user_id, api_key, username)
684 685 return False
685 686 if not dbuser.active:
686 687 log.debug('User `%s:%s` is inactive, skipping fill data',
687 688 username, user_id)
688 689 return False
689 690
690 691 log.debug('AuthUser: filling found user:%s data', dbuser)
691 692 user_data = dbuser.get_dict()
692 693
693 694 user_data.update({
694 695                 # explicitly set the safe escaped values
695 696 'first_name': dbuser.first_name,
696 697 'last_name': dbuser.last_name,
697 698 })
698 699
699 700 for k, v in user_data.items():
700 701                 # properties of auth user we don't update
701 702 if k not in ['auth_tokens', 'permissions']:
702 703 setattr(auth_user, k, v)
703 704
704 705 except Exception:
705 706 log.error(traceback.format_exc())
706 707 auth_user.is_authenticated = False
707 708 return False
708 709
709 710 return True
710 711
711 712 def has_perm(self, user, perm):
712 713 perm = self._get_perm(perm)
713 714 user = self._get_user(user)
714 715
715 716 return UserToPerm.query().filter(UserToPerm.user == user)\
716 717 .filter(UserToPerm.permission == perm).scalar() is not None
717 718
718 719 def grant_perm(self, user, perm):
719 720 """
720 721 Grant user global permissions
721 722
722 723 :param user:
723 724 :param perm:
724 725 """
725 726 user = self._get_user(user)
726 727 perm = self._get_perm(perm)
727 728 # if this permission is already granted skip it
728 729 _perm = UserToPerm.query()\
729 730 .filter(UserToPerm.user == user)\
730 731 .filter(UserToPerm.permission == perm)\
731 732 .scalar()
732 733 if _perm:
733 734 return
734 735 new = UserToPerm()
735 736 new.user = user
736 737 new.permission = perm
737 738 self.sa.add(new)
738 739 return new
739 740
740 741 def revoke_perm(self, user, perm):
741 742 """
742 743         Revoke a user's global permissions
743 744
744 745 :param user:
745 746 :param perm:
746 747 """
747 748 user = self._get_user(user)
748 749 perm = self._get_perm(perm)
749 750
750 751 obj = UserToPerm.query()\
751 752 .filter(UserToPerm.user == user)\
752 753 .filter(UserToPerm.permission == perm)\
753 754 .scalar()
754 755 if obj:
755 756 self.sa.delete(obj)
756 757
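A sketch of the grant/check/revoke cycle above; the permission key 'hg.create.repository' is assumed here purely for illustration, and committing is left to the caller because the methods only stage changes on the session:

from rhodecode.model.meta import Session
from rhodecode.model.user import UserModel

model = UserModel()

model.grant_perm('jdoe', 'hg.create.repository')  # no-op if already granted
Session().commit()

assert model.has_perm('jdoe', 'hg.create.repository')

model.revoke_perm('jdoe', 'hg.create.repository')
Session().commit()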
757 758 def add_extra_email(self, user, email):
758 759 """
759 760 Adds email address to UserEmailMap
760 761
761 762 :param user:
762 763 :param email:
763 764 """
764 765
765 766 user = self._get_user(user)
766 767
767 768 obj = UserEmailMap()
768 769 obj.user = user
769 770 obj.email = email
770 771 self.sa.add(obj)
771 772 return obj
772 773
773 774 def delete_extra_email(self, user, email_id):
774 775 """
775 776 Removes email address from UserEmailMap
776 777
777 778 :param user:
778 779 :param email_id:
779 780 """
780 781 user = self._get_user(user)
781 782 obj = UserEmailMap.query().get(email_id)
782 783 if obj and obj.user_id == user.user_id:
783 784 self.sa.delete(obj)
784 785
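A small sketch of the extra-email helpers above; the address is a placeholder and the primary-key attribute name (email_id) on the returned UserEmailMap row is assumed:

from rhodecode.model.meta import Session
from rhodecode.model.user import UserModel

model = UserModel()

# attach a secondary address to the account
email_entry = model.add_extra_email('jdoe', 'jdoe+alt@example.com')
Session().commit()

# removal is done by UserEmailMap id and only succeeds for the owning user
model.delete_extra_email('jdoe', email_entry.email_id)  # attribute name assumed
Session().commit()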
785 786 def parse_ip_range(self, ip_range):
786 787 ip_list = []
787 788
788 789 def make_unique(value):
789 790 seen = []
790 791 return [c for c in value if not (c in seen or seen.append(c))]
791 792
792 793         # first split by commas
793 794 for ip_range in ip_range.split(','):
794 795 if not ip_range:
795 796 continue
796 797 ip_range = ip_range.strip()
797 798 if '-' in ip_range:
798 799 start_ip, end_ip = ip_range.split('-', 1)
799 800 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
800 801 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
801 802 parsed_ip_range = []
802 803
803 804 for index in xrange(int(start_ip), int(end_ip) + 1):
804 805 new_ip = ipaddress.ip_address(index)
805 806 parsed_ip_range.append(str(new_ip))
806 807 ip_list.extend(parsed_ip_range)
807 808 else:
808 809 ip_list.append(ip_range)
809 810
810 811 return make_unique(ip_list)
811 812
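The range expansion in parse_ip_range above can be hard to read inside the diff; the standalone sketch below mirrors the same idea with the stdlib ipaddress module (Python 3 syntax rather than the xrange-based original), using made-up addresses:

import ipaddress


def expand_ip_ranges(ip_range):
    """Expand 'start-end' ranges, keep plain entries, preserve first-seen order."""
    result = []
    for chunk in ip_range.split(','):
        chunk = chunk.strip()
        if not chunk:
            continue
        if '-' in chunk:
            start, end = (ipaddress.ip_address(part.strip())
                          for part in chunk.split('-', 1))
            result.extend(str(ipaddress.ip_address(i))
                          for i in range(int(start), int(end) + 1))
        else:
            result.append(chunk)
    # de-duplicate while keeping order, like make_unique() in the method above
    return list(dict.fromkeys(result))


print(expand_ip_ranges('192.168.0.1-192.168.0.3, 10.0.0.5'))
# ['192.168.0.1', '192.168.0.2', '192.168.0.3', '10.0.0.5']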
812 813 def add_extra_ip(self, user, ip, description=None):
813 814 """
814 815 Adds ip address to UserIpMap
815 816
816 817 :param user:
817 818 :param ip:
818 819 """
819 820
820 821 user = self._get_user(user)
821 822 obj = UserIpMap()
822 823 obj.user = user
823 824 obj.ip_addr = ip
824 825 obj.description = description
825 826 self.sa.add(obj)
826 827 return obj
827 828
828 829 def delete_extra_ip(self, user, ip_id):
829 830 """
830 831 Removes ip address from UserIpMap
831 832
832 833 :param user:
833 834 :param ip_id:
834 835 """
835 836 user = self._get_user(user)
836 837 obj = UserIpMap.query().get(ip_id)
837 838 if obj and obj.user_id == user.user_id:
838 839 self.sa.delete(obj)
839 840
840 841 def get_accounts_in_creation_order(self, current_user=None):
841 842 """
842 843         Get accounts in order of creation, for deactivation under license limits:
843 844
844 845         pick the currently logged in user and put it at position 0 of the list,
845 846         pick all super-admins in order of creation date and add them to the list,
846 847         pick all other accounts in order of creation and add them to the list.
847 848
848 849 Based on that list, the last accounts can be disabled as they are
849 850 created at the end and don't include any of the super admins as well
850 851 as the current user.
851 852
852 853 :param current_user: optionally current user running this operation
853 854 """
854 855
855 856 if not current_user:
856 857 current_user = get_current_rhodecode_user()
857 858 active_super_admins = [
858 859 x.user_id for x in User.query()
859 860 .filter(User.user_id != current_user.user_id)
860 861 .filter(User.active == true())
861 862 .filter(User.admin == true())
862 863 .order_by(User.created_on.asc())]
863 864
864 865 active_regular_users = [
865 866 x.user_id for x in User.query()
866 867 .filter(User.user_id != current_user.user_id)
867 868 .filter(User.active == true())
868 869 .filter(User.admin == false())
869 870 .order_by(User.created_on.asc())]
870 871
871 872 list_of_accounts = [current_user.user_id]
872 873 list_of_accounts += active_super_admins
873 874 list_of_accounts += active_regular_users
874 875
875 876 return list_of_accounts
876 877
877 878 def deactivate_last_users(self, expected_users, current_user=None):
878 879 """
879 880 Deactivate accounts that are over the license limits.
880 881         The algorithm for which accounts to disable is based on the following:
881 882
882 883 Get current user, then super admins in creation order, then regular
883 884 active users in creation order.
884 885
885 886 Using that list we mark all accounts from the end of it as inactive.
886 887 This way we block only latest created accounts.
887 888
888 889         :param expected_users: number of users expected to remain active; we
889 890             deactivate the trailing accounts beyond that number from the list
890 891 """
891 892
892 893 list_of_accounts = self.get_accounts_in_creation_order(
893 894 current_user=current_user)
894 895
895 896 for acc_id in list_of_accounts[expected_users + 1:]:
896 897 user = User.get(acc_id)
897 898 log.info('Deactivating account %s for license unlock', user)
898 899 user.active = False
899 900 Session().add(user)
900 901 Session().commit()
901 902
902 903 return
903 904
904 905 def get_user_log(self, user, filter_term):
905 906 user_log = UserLog.query()\
906 907 .filter(or_(UserLog.user_id == user.user_id,
907 908 UserLog.username == user.username))\
908 909 .options(joinedload(UserLog.user))\
909 910 .options(joinedload(UserLog.repository))\
910 911 .order_by(UserLog.action_date.desc())
911 912
912 913 user_log = user_log_filter(user_log, filter_term)
913 914 return user_log
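A self-contained sketch of the ordering rule used by get_accounts_in_creation_order and deactivate_last_users above: the current user comes first, then super-admins by creation date, then regular users, and everything past the allowed count is deactivated from the end. The ids and the limit are made up:

# hypothetical account ids, already sorted by creation date within each group
current_user_id = 7
super_admin_ids = [1, 2]
regular_user_ids = [3, 4, 5, 6, 8, 9]

ordered = [current_user_id] + super_admin_ids + regular_user_ids

expected_users = 5  # e.g. a license seat limit
to_deactivate = ordered[expected_users + 1:]  # mirrors the slice in the model
print(to_deactivate)  # [6, 8, 9] -> the newest regular accounts are disabled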
@@ -1,34 +1,35 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import colander
22 22 from rhodecode.model.validation_schema import validators, preparers, types
23 23
24 24
25 25 class ReviewerSchema(colander.MappingSchema):
26 26 username = colander.SchemaNode(types.StrOrIntType())
27 27 reasons = colander.SchemaNode(colander.List(), missing=['no reason specified'])
28 28 mandatory = colander.SchemaNode(colander.Boolean(), missing=False)
29 rules = colander.SchemaNode(colander.List(), missing=[])
29 30
30 31
31 32 class ReviewerListSchema(colander.SequenceSchema):
32 33 reviewers = ReviewerSchema()
33 34
34 35
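A hedged sketch of deserializing reviewer data with the extended schema above, showing the new rules field falling back to its empty-list default; the import is omitted because the module path is not visible in this truncated diff, and the rule id 42 is purely illustrative:

# ReviewerListSchema / ReviewerSchema are the classes defined in the hunk above
schema = ReviewerListSchema()

data = schema.deserialize([
    {'username': 'jdoe', 'mandatory': True, 'rules': [42]},
    {'username': 'reviewer2'},  # no reasons/rules/mandatory flag supplied
])

assert data[0]['rules'] == [42]
assert data[1]['rules'] == []                         # new field default
assert data[1]['reasons'] == ['no reason specified']  # existing default
assert data[1]['mandatory'] is False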
(8 additional modified files were truncated by the diff viewer and are not shown)