##// END OF EJS Templates
default-reviewers: introduce new voting rule logic that allows...
marcink -
r2484:3775edd6 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,38 b''
import logging

# Explicit imports instead of `from sqlalchemy import *`: only these three
# names are actually used, and the wildcard silently shadows module names.
from sqlalchemy import Column, MetaData, UnicodeText

from rhodecode.model import meta
from rhodecode.lib.dbmigrate.versions import _reset_base, notify

log = logging.getLogger(__name__)


def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata

    Adds the `rule_data_json` column to the pull-request reviewers table,
    used to persist the reviewer-rule data introduced by the voting-rule
    logic.
    """
    _reset_base(migrate_engine)
    # import the frozen schema matching this migration's db version
    from rhodecode.lib.dbmigrate.schema import db_4_11_0_0 as db

    reviewers_table = db.PullRequestReviewers.__table__

    # JSON payload column; on MySQL map to a larger UnicodeText because
    # the default text size is too small for rule data.
    rule_data = Column(
        'rule_data_json',
        db.JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
    rule_data.create(table=reviewers_table)

    # issue fixups
    fixups(db, meta.Session)


def downgrade(migrate_engine):
    # No schema rollback is performed; bind metadata per migrate convention.
    meta = MetaData()
    meta.bind = migrate_engine


def fixups(models, _SESSION):
    # No data fixups are required for this migration.
    pass
@@ -0,0 +1,37 b''
import logging

# Explicit imports instead of `from sqlalchemy import *`: only these three
# names are actually used, and the wildcard silently shadows module names.
from sqlalchemy import Column, Integer, MetaData

from rhodecode.model import meta
from rhodecode.lib.dbmigrate.versions import _reset_base, notify

log = logging.getLogger(__name__)


def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata

    Adds the `vote_rule` column to the repo review-rule user-group table,
    storing the number of votes required from a group (-1 means all
    members must vote).
    """
    _reset_base(migrate_engine)
    # import the frozen schema matching this migration's db version
    from rhodecode.lib.dbmigrate.schema import db_4_11_0_0 as db

    user_group_review_table = db.RepoReviewRuleUserGroup.__table__

    # default of -1 keeps the pre-migration behaviour for existing rows
    vote_rule = Column("vote_rule", Integer(), nullable=True,
                       default=-1)
    vote_rule.create(table=user_group_review_table)

    # issue fixups
    fixups(db, meta.Session)


def downgrade(migrate_engine):
    # No schema rollback is performed; bind metadata per migrate convention.
    meta = MetaData()
    meta.bind = migrate_engine


def fixups(models, _SESSION):
    # No data fixups are required for this migration.
    pass
1 NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,194 +1,196 b''
1 1 {
2 2 "dirs": {
3 3 "css": {
4 4 "src":"rhodecode/public/css",
5 5 "dest":"rhodecode/public/css"
6 6 },
7 7 "js": {
8 8 "src": "rhodecode/public/js/src",
9 9 "src_rc": "rhodecode/public/js/rhodecode",
10 10 "dest": "rhodecode/public/js",
11 11 "bower": "bower_components",
12 12 "node_modules": "node_modules"
13 13 }
14 14 },
15 15 "copy": {
16 16 "main": {
17 17 "expand": true,
18 18 "cwd": "bower_components",
19 19 "src": "webcomponentsjs/webcomponents-lite.js",
20 20 "dest": "<%= dirs.js.dest %>/vendors"
21 21 }
22 22 },
23 23 "concat": {
24 24 "polymercss": {
25 25 "src": [
26 26 "<%= dirs.js.src %>/components/root-styles-prefix.html",
27 27 "<%= dirs.css.src %>/style-polymer.css",
28 28 "<%= dirs.js.src %>/components/root-styles-suffix.html"
29 29 ],
30 30 "dest": "<%= dirs.js.dest %>/src/components/root-styles.gen.html",
31 31 "nonull": true
32 32 },
33 33 "dist": {
34 34 "src": [
35 35 "<%= dirs.js.node_modules %>/jquery/dist/jquery.min.js",
36 36 "<%= dirs.js.node_modules %>/mousetrap/mousetrap.min.js",
37 37 "<%= dirs.js.node_modules %>/moment/min/moment.min.js",
38 38 "<%= dirs.js.node_modules %>/clipboard/dist/clipboard.min.js",
39 39 "<%= dirs.js.node_modules %>/favico.js/favico-0.3.10.min.js",
40 40 "<%= dirs.js.node_modules %>/appenlight-client/appenlight-client.min.js",
41 41 "<%= dirs.js.src %>/logging.js",
42 42 "<%= dirs.js.src %>/bootstrap.js",
43 43 "<%= dirs.js.src %>/i18n_utils.js",
44 44 "<%= dirs.js.src %>/deform.js",
45 "<%= dirs.js.src %>/ejs.js",
46 "<%= dirs.js.src %>/ejs_templates/utils.js",
45 47 "<%= dirs.js.src %>/plugins/jquery.pjax.js",
46 48 "<%= dirs.js.src %>/plugins/jquery.dataTables.js",
47 49 "<%= dirs.js.src %>/plugins/flavoured_checkbox.js",
48 50 "<%= dirs.js.src %>/plugins/jquery.auto-grow-input.js",
49 51 "<%= dirs.js.src %>/plugins/jquery.autocomplete.js",
50 52 "<%= dirs.js.src %>/plugins/jquery.debounce.js",
51 53 "<%= dirs.js.src %>/plugins/jquery.mark.js",
52 54 "<%= dirs.js.src %>/plugins/jquery.timeago.js",
53 55 "<%= dirs.js.src %>/plugins/jquery.timeago-extension.js",
54 56 "<%= dirs.js.src %>/select2/select2.js",
55 57 "<%= dirs.js.src %>/codemirror/codemirror.js",
56 58 "<%= dirs.js.src %>/codemirror/codemirror_loadmode.js",
57 59 "<%= dirs.js.src %>/codemirror/codemirror_hint.js",
58 60 "<%= dirs.js.src %>/codemirror/codemirror_overlay.js",
59 61 "<%= dirs.js.src %>/codemirror/codemirror_placeholder.js",
60 62 "<%= dirs.js.src %>/codemirror/codemirror_simplemode.js",
61 63 "<%= dirs.js.dest %>/mode/meta.js",
62 64 "<%= dirs.js.dest %>/mode/meta_ext.js",
63 65 "<%= dirs.js.src_rc %>/i18n/select2/translations.js",
64 66 "<%= dirs.js.src %>/rhodecode/utils/array.js",
65 67 "<%= dirs.js.src %>/rhodecode/utils/string.js",
66 68 "<%= dirs.js.src %>/rhodecode/utils/pyroutes.js",
67 69 "<%= dirs.js.src %>/rhodecode/utils/ajax.js",
68 70 "<%= dirs.js.src %>/rhodecode/utils/autocomplete.js",
69 71 "<%= dirs.js.src %>/rhodecode/utils/colorgenerator.js",
70 72 "<%= dirs.js.src %>/rhodecode/utils/ie.js",
71 73 "<%= dirs.js.src %>/rhodecode/utils/os.js",
72 74 "<%= dirs.js.src %>/rhodecode/utils/topics.js",
73 75 "<%= dirs.js.src %>/rhodecode/init.js",
74 76 "<%= dirs.js.src %>/rhodecode/changelog.js",
75 77 "<%= dirs.js.src %>/rhodecode/codemirror.js",
76 78 "<%= dirs.js.src %>/rhodecode/comments.js",
77 79 "<%= dirs.js.src %>/rhodecode/constants.js",
78 80 "<%= dirs.js.src %>/rhodecode/files.js",
79 81 "<%= dirs.js.src %>/rhodecode/followers.js",
80 82 "<%= dirs.js.src %>/rhodecode/menus.js",
81 83 "<%= dirs.js.src %>/rhodecode/notifications.js",
82 84 "<%= dirs.js.src %>/rhodecode/permissions.js",
83 85 "<%= dirs.js.src %>/rhodecode/pjax.js",
84 86 "<%= dirs.js.src %>/rhodecode/pullrequests.js",
85 87 "<%= dirs.js.src %>/rhodecode/settings.js",
86 88 "<%= dirs.js.src %>/rhodecode/select2_widgets.js",
87 89 "<%= dirs.js.src %>/rhodecode/tooltips.js",
88 90 "<%= dirs.js.src %>/rhodecode/users.js",
89 91 "<%= dirs.js.src %>/rhodecode/appenlight.js",
90 92 "<%= dirs.js.src %>/rhodecode.js"
91 93 ],
92 94 "dest": "<%= dirs.js.dest %>/scripts.js",
93 95 "nonull": true
94 96 }
95 97 },
96 98 "crisper": {
97 99 "dist": {
98 100 "options": {
99 101 "cleanup": false,
100 102 "onlySplit": true
101 103 },
102 104 "src": "<%= dirs.js.dest %>/rhodecode-components.html",
103 105 "dest": "<%= dirs.js.dest %>/rhodecode-components.js"
104 106 }
105 107 },
106 108 "less": {
107 109 "development": {
108 110 "options": {
109 111 "compress": false,
110 112 "yuicompress": false,
111 113 "optimization": 0
112 114 },
113 115 "files": {
114 116 "<%= dirs.css.dest %>/style.css": "<%= dirs.css.src %>/main.less",
115 117 "<%= dirs.css.dest %>/style-polymer.css": "<%= dirs.css.src %>/polymer.less"
116 118 }
117 119 },
118 120 "production": {
119 121 "options": {
120 122 "compress": true,
121 123 "yuicompress": true,
122 124 "optimization": 2
123 125 },
124 126 "files": {
125 127 "<%= dirs.css.dest %>/style.css": "<%= dirs.css.src %>/main.less",
126 128 "<%= dirs.css.dest %>/style-polymer.css": "<%= dirs.css.src %>/polymer.less"
127 129 }
128 130 },
129 131 "components": {
130 132 "files": [
131 133 {
132 134 "cwd": "<%= dirs.js.src %>/components/",
133 135 "dest": "<%= dirs.js.src %>/components/",
134 136 "src": [
135 137 "**/*.less"
136 138 ],
137 139 "expand": true,
138 140 "ext": ".css"
139 141 }
140 142 ]
141 143 }
142 144 },
143 145 "watch": {
144 146 "less": {
145 147 "files": [
146 148 "<%= dirs.css.src %>/**/*.less",
147 149 "<%= dirs.js.src %>/components/**/*.less"
148 150 ],
149 151 "tasks": [
150 152 "less:development",
151 153 "less:components",
152 154 "concat:polymercss",
153 155 "vulcanize",
154 156 "crisper",
155 157 "concat:dist"
156 158 ]
157 159 },
158 160 "js": {
159 161 "files": [
160 162 "!<%= dirs.js.src %>/components/root-styles.gen.html",
161 163 "<%= dirs.js.src %>/**/*.js",
162 164 "<%= dirs.js.src %>/components/**/*.html"
163 165 ],
164 166 "tasks": [
165 167 "less:components",
166 168 "concat:polymercss",
167 169 "vulcanize",
168 170 "crisper",
169 171 "concat:dist"
170 172 ]
171 173 }
172 174 },
173 175 "jshint": {
174 176 "rhodecode": {
175 177 "src": "<%= dirs.js.src %>/rhodecode/**/*.js",
176 178 "options": {
177 179 "jshintrc": ".jshintrc"
178 180 }
179 181 }
180 182 },
181 183 "vulcanize": {
182 184 "default": {
183 185 "options": {
184 186 "abspath": "",
185 187 "inlineScripts": true,
186 188 "inlineCss": true,
187 189 "stripComments": true
188 190 },
189 191 "files": {
190 192 "<%= dirs.js.dest %>/rhodecode-components.html": "<%= dirs.js.src %>/components/shared-components.html"
191 193 }
192 194 }
193 195 }
194 196 }
@@ -1,63 +1,63 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22
23 23 RhodeCode, a web based repository management software
24 24 versioning implementation: http://www.python.org/dev/peps/pep-0386/
25 25 """
26 26
27 27 import os
28 28 import sys
29 29 import platform
30 30
31 31 VERSION = tuple(open(os.path.join(
32 32 os.path.dirname(__file__), 'VERSION')).read().split('.'))
33 33
34 34 BACKENDS = {
35 35 'hg': 'Mercurial repository',
36 36 'git': 'Git repository',
37 37 'svn': 'Subversion repository',
38 38 }
39 39
40 40 CELERY_ENABLED = False
41 41 CELERY_EAGER = False
42 42
43 43 # link to config for pyramid
44 44 CONFIG = {}
45 45
46 46 # Populated with the settings dictionary from application init in
47 47 # rhodecode.conf.environment.load_pyramid_environment
48 48 PYRAMID_SETTINGS = {}
49 49
50 50 # Linked module for extensions
51 51 EXTENSIONS = {}
52 52
53 53 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
54 __dbversion__ = 83 # defines current db version for migrations
54 __dbversion__ = 85 # defines current db version for migrations
55 55 __platform__ = platform.system()
56 56 __license__ = 'AGPLv3, and Commercial License'
57 57 __author__ = 'RhodeCode GmbH'
58 58 __url__ = 'https://code.rhodecode.com'
59 59
60 60 is_windows = __platform__ in ['Windows']
61 61 is_unix = not is_windows
62 62 is_test = False
63 63 disable_error_handler = False
@@ -1,142 +1,142 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23 import urlobject
24 24
25 25 from rhodecode.api.tests.utils import (
26 26 build_data, api_call, assert_error, assert_ok)
27 27 from rhodecode.lib import helpers as h
28 28 from rhodecode.lib.utils2 import safe_unicode
29 29
30 30 pytestmark = pytest.mark.backends("git", "hg")
31 31
32 32
33 33 @pytest.mark.usefixtures("testuser_api", "app")
34 34 class TestGetPullRequest(object):
35 35
36 36 def test_api_get_pull_request(self, pr_util, http_host_only_stub):
37 37 from rhodecode.model.pull_request import PullRequestModel
38 38 pull_request = pr_util.create_pull_request(mergeable=True)
39 39 id_, params = build_data(
40 40 self.apikey, 'get_pull_request',
41 41 pullrequestid=pull_request.pull_request_id)
42 42
43 43 response = api_call(self.app, params)
44 44
45 45 assert response.status == '200 OK'
46 46
47 47 url_obj = urlobject.URLObject(
48 48 h.route_url(
49 49 'pullrequest_show',
50 50 repo_name=pull_request.target_repo.repo_name,
51 51 pull_request_id=pull_request.pull_request_id))
52 52
53 53 pr_url = safe_unicode(
54 54 url_obj.with_netloc(http_host_only_stub))
55 55 source_url = safe_unicode(
56 56 pull_request.source_repo.clone_url().with_netloc(http_host_only_stub))
57 57 target_url = safe_unicode(
58 58 pull_request.target_repo.clone_url().with_netloc(http_host_only_stub))
59 59 shadow_url = safe_unicode(
60 60 PullRequestModel().get_shadow_clone_url(pull_request))
61 61
62 62 expected = {
63 63 'pull_request_id': pull_request.pull_request_id,
64 64 'url': pr_url,
65 65 'title': pull_request.title,
66 66 'description': pull_request.description,
67 67 'status': pull_request.status,
68 68 'created_on': pull_request.created_on,
69 69 'updated_on': pull_request.updated_on,
70 70 'commit_ids': pull_request.revisions,
71 71 'review_status': pull_request.calculated_review_status(),
72 72 'mergeable': {
73 73 'status': True,
74 74 'message': 'This pull request can be automatically merged.',
75 75 },
76 76 'source': {
77 77 'clone_url': source_url,
78 78 'repository': pull_request.source_repo.repo_name,
79 79 'reference': {
80 80 'name': pull_request.source_ref_parts.name,
81 81 'type': pull_request.source_ref_parts.type,
82 82 'commit_id': pull_request.source_ref_parts.commit_id,
83 83 },
84 84 },
85 85 'target': {
86 86 'clone_url': target_url,
87 87 'repository': pull_request.target_repo.repo_name,
88 88 'reference': {
89 89 'name': pull_request.target_ref_parts.name,
90 90 'type': pull_request.target_ref_parts.type,
91 91 'commit_id': pull_request.target_ref_parts.commit_id,
92 92 },
93 93 },
94 94 'merge': {
95 95 'clone_url': shadow_url,
96 96 'reference': {
97 97 'name': pull_request.shadow_merge_ref.name,
98 98 'type': pull_request.shadow_merge_ref.type,
99 99 'commit_id': pull_request.shadow_merge_ref.commit_id,
100 100 },
101 101 },
102 102 'author': pull_request.author.get_api_data(include_secrets=False,
103 103 details='basic'),
104 104 'reviewers': [
105 105 {
106 106 'user': reviewer.get_api_data(include_secrets=False,
107 107 details='basic'),
108 108 'reasons': reasons,
109 109 'review_status': st[0][1].status if st else 'not_reviewed',
110 110 }
111 for reviewer, reasons, mandatory, st in
111 for obj, reviewer, reasons, mandatory, st in
112 112 pull_request.reviewers_statuses()
113 113 ]
114 114 }
115 115 assert_ok(id_, expected, response.body)
116 116
117 117 def test_api_get_pull_request_repo_error(self, pr_util):
118 118 pull_request = pr_util.create_pull_request()
119 119 id_, params = build_data(
120 120 self.apikey, 'get_pull_request',
121 121 repoid=666, pullrequestid=pull_request.pull_request_id)
122 122 response = api_call(self.app, params)
123 123
124 124 expected = 'repository `666` does not exist'
125 125 assert_error(id_, expected, given=response.body)
126 126
127 127 def test_api_get_pull_request_pull_request_error(self):
128 128 id_, params = build_data(
129 129 self.apikey, 'get_pull_request', pullrequestid=666)
130 130 response = api_call(self.app, params)
131 131
132 132 expected = 'pull request `666` does not exist'
133 133 assert_error(id_, expected, given=response.body)
134 134
135 135 def test_api_get_pull_request_pull_request_error_just_pr_id(self):
136 136 id_, params = build_data(
137 137 self.apikey, 'get_pull_request',
138 138 pullrequestid=666)
139 139 response = api_call(self.app, params)
140 140
141 141 expected = 'pull request `666` does not exist'
142 142 assert_error(id_, expected, given=response.body)
@@ -1,213 +1,213 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.lib.vcs.nodes import FileNode
24 24 from rhodecode.model.db import User
25 25 from rhodecode.model.pull_request import PullRequestModel
26 26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
27 27 from rhodecode.api.tests.utils import (
28 28 build_data, api_call, assert_ok, assert_error)
29 29
30 30
31 31 @pytest.mark.usefixtures("testuser_api", "app")
32 32 class TestUpdatePullRequest(object):
33 33
34 34 @pytest.mark.backends("git", "hg")
35 35 def test_api_update_pull_request_title_or_description(
36 36 self, pr_util, no_notifications):
37 37 pull_request = pr_util.create_pull_request()
38 38
39 39 id_, params = build_data(
40 40 self.apikey, 'update_pull_request',
41 41 repoid=pull_request.target_repo.repo_name,
42 42 pullrequestid=pull_request.pull_request_id,
43 43 title='New TITLE OF A PR',
44 44 description='New DESC OF A PR',
45 45 )
46 46 response = api_call(self.app, params)
47 47
48 48 expected = {
49 49 "msg": "Updated pull request `{}`".format(
50 50 pull_request.pull_request_id),
51 51 "pull_request": response.json['result']['pull_request'],
52 52 "updated_commits": {"added": [], "common": [], "removed": []},
53 53 "updated_reviewers": {"added": [], "removed": []},
54 54 }
55 55
56 56 response_json = response.json['result']
57 57 assert response_json == expected
58 58 pr = response_json['pull_request']
59 59 assert pr['title'] == 'New TITLE OF A PR'
60 60 assert pr['description'] == 'New DESC OF A PR'
61 61
62 62 @pytest.mark.backends("git", "hg")
63 63 def test_api_try_update_closed_pull_request(
64 64 self, pr_util, no_notifications):
65 65 pull_request = pr_util.create_pull_request()
66 66 PullRequestModel().close_pull_request(
67 67 pull_request, TEST_USER_ADMIN_LOGIN)
68 68
69 69 id_, params = build_data(
70 70 self.apikey, 'update_pull_request',
71 71 repoid=pull_request.target_repo.repo_name,
72 72 pullrequestid=pull_request.pull_request_id)
73 73 response = api_call(self.app, params)
74 74
75 75 expected = 'pull request `{}` update failed, pull request ' \
76 76 'is closed'.format(pull_request.pull_request_id)
77 77
78 78 assert_error(id_, expected, response.body)
79 79
80 80 @pytest.mark.backends("git", "hg")
81 81 def test_api_update_update_commits(self, pr_util, no_notifications):
82 82 commits = [
83 83 {'message': 'a'},
84 84 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
85 85 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
86 86 ]
87 87 pull_request = pr_util.create_pull_request(
88 88 commits=commits, target_head='a', source_head='b', revisions=['b'])
89 89 pr_util.update_source_repository(head='c')
90 90 repo = pull_request.source_repo.scm_instance()
91 91 commits = [x for x in repo.get_commits()]
92 92 print commits
93 93
94 94 added_commit_id = commits[-1].raw_id # c commit
95 95 common_commit_id = commits[1].raw_id # b commit is common ancestor
96 96 total_commits = [added_commit_id, common_commit_id]
97 97
98 98 id_, params = build_data(
99 99 self.apikey, 'update_pull_request',
100 100 repoid=pull_request.target_repo.repo_name,
101 101 pullrequestid=pull_request.pull_request_id,
102 102 update_commits=True
103 103 )
104 104 response = api_call(self.app, params)
105 105
106 106 expected = {
107 107 "msg": "Updated pull request `{}`".format(
108 108 pull_request.pull_request_id),
109 109 "pull_request": response.json['result']['pull_request'],
110 110 "updated_commits": {"added": [added_commit_id],
111 111 "common": [common_commit_id],
112 112 "total": total_commits,
113 113 "removed": []},
114 114 "updated_reviewers": {"added": [], "removed": []},
115 115 }
116 116
117 117 assert_ok(id_, expected, response.body)
118 118
119 119 @pytest.mark.backends("git", "hg")
120 120 def test_api_update_change_reviewers(
121 121 self, user_util, pr_util, no_notifications):
122 122 a = user_util.create_user()
123 123 b = user_util.create_user()
124 124 c = user_util.create_user()
125 125 new_reviewers = [
126 126 {'username': b.username,'reasons': ['updated via API'],
127 127 'mandatory':False},
128 128 {'username': c.username, 'reasons': ['updated via API'],
129 129 'mandatory':False},
130 130 ]
131 131
132 132 added = [b.username, c.username]
133 133 removed = [a.username]
134 134
135 135 pull_request = pr_util.create_pull_request(
136 reviewers=[(a.username, ['added via API'], False)])
136 reviewers=[(a.username, ['added via API'], False, [])])
137 137
138 138 id_, params = build_data(
139 139 self.apikey, 'update_pull_request',
140 140 repoid=pull_request.target_repo.repo_name,
141 141 pullrequestid=pull_request.pull_request_id,
142 142 reviewers=new_reviewers)
143 143 response = api_call(self.app, params)
144 144 expected = {
145 145 "msg": "Updated pull request `{}`".format(
146 146 pull_request.pull_request_id),
147 147 "pull_request": response.json['result']['pull_request'],
148 148 "updated_commits": {"added": [], "common": [], "removed": []},
149 149 "updated_reviewers": {"added": added, "removed": removed},
150 150 }
151 151
152 152 assert_ok(id_, expected, response.body)
153 153
154 154 @pytest.mark.backends("git", "hg")
155 155 def test_api_update_bad_user_in_reviewers(self, pr_util):
156 156 pull_request = pr_util.create_pull_request()
157 157
158 158 id_, params = build_data(
159 159 self.apikey, 'update_pull_request',
160 160 repoid=pull_request.target_repo.repo_name,
161 161 pullrequestid=pull_request.pull_request_id,
162 162 reviewers=[{'username': 'bad_name'}])
163 163 response = api_call(self.app, params)
164 164
165 165 expected = 'user `bad_name` does not exist'
166 166
167 167 assert_error(id_, expected, response.body)
168 168
169 169 @pytest.mark.backends("git", "hg")
170 170 def test_api_update_repo_error(self, pr_util):
171 171 pull_request = pr_util.create_pull_request()
172 172 id_, params = build_data(
173 173 self.apikey, 'update_pull_request',
174 174 repoid='fake',
175 175 pullrequestid=pull_request.pull_request_id,
176 176 reviewers=[{'username': 'bad_name'}])
177 177 response = api_call(self.app, params)
178 178
179 179 expected = 'repository `fake` does not exist'
180 180
181 181 response_json = response.json['error']
182 182 assert response_json == expected
183 183
184 184 @pytest.mark.backends("git", "hg")
185 185 def test_api_update_pull_request_error(self, pr_util):
186 186 pull_request = pr_util.create_pull_request()
187 187
188 188 id_, params = build_data(
189 189 self.apikey, 'update_pull_request',
190 190 repoid=pull_request.target_repo.repo_name,
191 191 pullrequestid=999999,
192 192 reviewers=[{'username': 'bad_name'}])
193 193 response = api_call(self.app, params)
194 194
195 195 expected = 'pull request `999999` does not exist'
196 196 assert_error(id_, expected, response.body)
197 197
198 198 @pytest.mark.backends("git", "hg")
199 199 def test_api_update_pull_request_no_perms_to_update(
200 200 self, user_util, pr_util):
201 201 user = user_util.create_user()
202 202 pull_request = pr_util.create_pull_request()
203 203
204 204 id_, params = build_data(
205 205 user.api_key, 'update_pull_request',
206 206 repoid=pull_request.target_repo.repo_name,
207 207 pullrequestid=pull_request.pull_request_id,)
208 208 response = api_call(self.app, params)
209 209
210 210 expected = ('pull request `%s` update failed, '
211 211 'no permission to update.') % pull_request.pull_request_id
212 212
213 213 assert_error(id_, expected, response.body)
@@ -1,248 +1,247 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 import formencode
24 24 import formencode.htmlfill
25 25
26 26 from pyramid.httpexceptions import HTTPFound
27 27 from pyramid.view import view_config
28 28 from pyramid.response import Response
29 29 from pyramid.renderers import render
30 30
31 31 from rhodecode.apps._base import BaseAppView, DataGridAppView
32 32 from rhodecode.lib.auth import (
33 33 LoginRequired, NotAnonymous, CSRFRequired, HasPermissionAnyDecorator)
34 34 from rhodecode.lib import helpers as h, audit_logger
35 35 from rhodecode.lib.utils2 import safe_unicode
36 36
37 37 from rhodecode.model.forms import UserGroupForm
38 38 from rhodecode.model.permission import PermissionModel
39 39 from rhodecode.model.scm import UserGroupList
40 40 from rhodecode.model.db import (
41 41 or_, count, User, UserGroup, UserGroupMember)
42 42 from rhodecode.model.meta import Session
43 43 from rhodecode.model.user_group import UserGroupModel
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
48 48 class AdminUserGroupsView(BaseAppView, DataGridAppView):
49 49
50 50 def load_default_context(self):
51 51 c = self._get_local_tmpl_context()
52 52
53 53 PermissionModel().set_global_permission_choices(
54 54 c, gettext_translator=self.request.translate)
55 55
56
57 56 return c
58 57
59 58 # permission check in data loading of
60 59 # `user_groups_list_data` via UserGroupList
61 60 @LoginRequired()
62 61 @NotAnonymous()
63 62 @view_config(
64 63 route_name='user_groups', request_method='GET',
65 64 renderer='rhodecode:templates/admin/user_groups/user_groups.mako')
66 65 def user_groups_list(self):
67 66 c = self.load_default_context()
68 67 return self._get_template_context(c)
69 68
70 69 # permission check inside
71 70 @LoginRequired()
72 71 @NotAnonymous()
73 72 @view_config(
74 73 route_name='user_groups_data', request_method='GET',
75 74 renderer='json_ext', xhr=True)
76 75 def user_groups_list_data(self):
77 76 self.load_default_context()
78 77 column_map = {
79 78 'active': 'users_group_active',
80 79 'description': 'user_group_description',
81 80 'members': 'members_total',
82 81 'owner': 'user_username',
83 82 'sync': 'group_data'
84 83 }
85 84 draw, start, limit = self._extract_chunk(self.request)
86 85 search_q, order_by, order_dir = self._extract_ordering(
87 86 self.request, column_map=column_map)
88 87
89 88 _render = self.request.get_partial_renderer(
90 89 'rhodecode:templates/data_table/_dt_elements.mako')
91 90
92 91 def user_group_name(user_group_id, user_group_name):
93 92 return _render("user_group_name", user_group_id, user_group_name)
94 93
95 94 def user_group_actions(user_group_id, user_group_name):
96 95 return _render("user_group_actions", user_group_id, user_group_name)
97 96
98 97 def user_profile(username):
99 98 return _render('user_profile', username)
100 99
101 100 auth_user_group_list = UserGroupList(
102 101 UserGroup.query().all(), perm_set=['usergroup.admin'])
103 102
104 103 allowed_ids = [-1]
105 104 for user_group in auth_user_group_list:
106 105 allowed_ids.append(user_group.users_group_id)
107 106
108 107 user_groups_data_total_count = UserGroup.query()\
109 108 .filter(UserGroup.users_group_id.in_(allowed_ids))\
110 109 .count()
111 110
112 111 member_count = count(UserGroupMember.user_id)
113 112 base_q = Session.query(
114 113 UserGroup.users_group_name,
115 114 UserGroup.user_group_description,
116 115 UserGroup.users_group_active,
117 116 UserGroup.users_group_id,
118 117 UserGroup.group_data,
119 118 User,
120 119 member_count.label('member_count')
121 120 ) \
122 121 .filter(UserGroup.users_group_id.in_(allowed_ids)) \
123 122 .outerjoin(UserGroupMember) \
124 123 .join(User, User.user_id == UserGroup.user_id) \
125 124 .group_by(UserGroup, User)
126 125
127 126 if search_q:
128 127 like_expression = u'%{}%'.format(safe_unicode(search_q))
129 128 base_q = base_q.filter(or_(
130 129 UserGroup.users_group_name.ilike(like_expression),
131 130 ))
132 131
133 132 user_groups_data_total_filtered_count = base_q.count()
134 133
135 134 if order_by == 'members_total':
136 135 sort_col = member_count
137 136 elif order_by == 'user_username':
138 137 sort_col = User.username
139 138 else:
140 139 sort_col = getattr(UserGroup, order_by, None)
141 140
142 141 if isinstance(sort_col, count) or sort_col:
143 142 if order_dir == 'asc':
144 143 sort_col = sort_col.asc()
145 144 else:
146 145 sort_col = sort_col.desc()
147 146
148 147 base_q = base_q.order_by(sort_col)
149 148 base_q = base_q.offset(start).limit(limit)
150 149
151 150 # authenticated access to user groups
152 151 auth_user_group_list = base_q.all()
153 152
154 153 user_groups_data = []
155 154 for user_gr in auth_user_group_list:
156 155 user_groups_data.append({
157 156 "users_group_name": user_group_name(
158 157 user_gr.users_group_id, h.escape(user_gr.users_group_name)),
159 158 "name_raw": h.escape(user_gr.users_group_name),
160 159 "description": h.escape(user_gr.user_group_description),
161 160 "members": user_gr.member_count,
162 161 # NOTE(marcink): because of advanced query we
163 162 # need to load it like that
164 163 "sync": UserGroup._load_group_data(
165 164 user_gr.group_data).get('extern_type'),
166 165 "active": h.bool2icon(user_gr.users_group_active),
167 166 "owner": user_profile(user_gr.User.username),
168 167 "action": user_group_actions(
169 168 user_gr.users_group_id, user_gr.users_group_name)
170 169 })
171 170
172 171 data = ({
173 172 'draw': draw,
174 173 'data': user_groups_data,
175 174 'recordsTotal': user_groups_data_total_count,
176 175 'recordsFiltered': user_groups_data_total_filtered_count,
177 176 })
178 177
179 178 return data
180 179
181 180 @LoginRequired()
182 181 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
183 182 @view_config(
184 183 route_name='user_groups_new', request_method='GET',
185 184 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
186 185 def user_groups_new(self):
187 186 c = self.load_default_context()
188 187 return self._get_template_context(c)
189 188
190 189 @LoginRequired()
191 190 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
192 191 @CSRFRequired()
193 192 @view_config(
194 193 route_name='user_groups_create', request_method='POST',
195 194 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
196 195 def user_groups_create(self):
197 196 _ = self.request.translate
198 197 c = self.load_default_context()
199 198 users_group_form = UserGroupForm(self.request.translate)()
200 199
201 200 user_group_name = self.request.POST.get('users_group_name')
202 201 try:
203 202 form_result = users_group_form.to_python(dict(self.request.POST))
204 203 user_group = UserGroupModel().create(
205 204 name=form_result['users_group_name'],
206 205 description=form_result['user_group_description'],
207 206 owner=self._rhodecode_user.user_id,
208 207 active=form_result['users_group_active'])
209 208 Session().flush()
210 209 creation_data = user_group.get_api_data()
211 210 user_group_name = form_result['users_group_name']
212 211
213 212 audit_logger.store_web(
214 213 'user_group.create', action_data={'data': creation_data},
215 214 user=self._rhodecode_user)
216 215
217 216 user_group_link = h.link_to(
218 217 h.escape(user_group_name),
219 218 h.route_path(
220 219 'edit_user_group', user_group_id=user_group.users_group_id))
221 220 h.flash(h.literal(_('Created user group %(user_group_link)s')
222 221 % {'user_group_link': user_group_link}),
223 222 category='success')
224 223 Session().commit()
225 224 user_group_id = user_group.users_group_id
226 225 except formencode.Invalid as errors:
227 226
228 227 data = render(
229 228 'rhodecode:templates/admin/user_groups/user_group_add.mako',
230 229 self._get_template_context(c), self.request)
231 230 html = formencode.htmlfill.render(
232 231 data,
233 232 defaults=errors.value,
234 233 errors=errors.error_dict or {},
235 234 prefix_error=False,
236 235 encoding="UTF-8",
237 236 force_defaults=False
238 237 )
239 238 return Response(html)
240 239
241 240 except Exception:
242 241 log.exception("Exception creating user group")
243 242 h.flash(_('Error occurred during creation of user group %s') \
244 243 % user_group_name, category='error')
245 244 raise HTTPFound(h.route_path('user_groups_new'))
246 245
247 246 raise HTTPFound(
248 247 h.route_path('edit_user_group', user_group_id=user_group_id))
@@ -1,1134 +1,1140 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20 import mock
21 21 import pytest
22 22
23 23 import rhodecode
24 24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 25 from rhodecode.lib.vcs.nodes import FileNode
26 26 from rhodecode.lib import helpers as h
27 27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 28 from rhodecode.model.db import (
29 29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
30 30 from rhodecode.model.meta import Session
31 31 from rhodecode.model.pull_request import PullRequestModel
32 32 from rhodecode.model.user import UserModel
33 33 from rhodecode.tests import (
34 34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 35 from rhodecode.tests.utils import AssertResponse
36 36
37 37
38 38 def route_path(name, params=None, **kwargs):
39 39 import urllib
40 40
41 41 base_url = {
42 42 'repo_changelog': '/{repo_name}/changelog',
43 43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 56 }[name].format(**kwargs)
57 57
58 58 if params:
59 59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 60 return base_url
61 61
62 62
63 63 @pytest.mark.usefixtures('app', 'autologin_user')
64 64 @pytest.mark.backends("git", "hg")
65 65 class TestPullrequestsView(object):
66 66
67 67 def test_index(self, backend):
68 68 self.app.get(route_path(
69 69 'pullrequest_new',
70 70 repo_name=backend.repo_name))
71 71
72 72 def test_option_menu_create_pull_request_exists(self, backend):
73 73 repo_name = backend.repo_name
74 74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75 75
76 76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 77 'pullrequest_new', repo_name=repo_name)
78 78 response.mustcontain(create_pr_link)
79 79
80 80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 81 repo = backend.repo
82 82
83 83 self.app.get(
84 84 route_path('pullrequest_new',
85 85 repo_name=repo.repo_name,
86 86 commit=repo.get_commit().raw_id),
87 87 status=200)
88 88
89 89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 90 def test_show(self, pr_util, pr_merge_enabled):
91 91 pull_request = pr_util.create_pull_request(
92 92 mergeable=pr_merge_enabled, enable_notifications=False)
93 93
94 94 response = self.app.get(route_path(
95 95 'pullrequest_show',
96 96 repo_name=pull_request.target_repo.scm_instance().name,
97 97 pull_request_id=pull_request.pull_request_id))
98 98
99 99 for commit_id in pull_request.revisions:
100 100 response.mustcontain(commit_id)
101 101
102 102 assert pull_request.target_ref_parts.type in response
103 103 assert pull_request.target_ref_parts.name in response
104 104 target_clone_url = pull_request.target_repo.clone_url()
105 105 assert target_clone_url in response
106 106
107 107 assert 'class="pull-request-merge"' in response
108 108 assert (
109 109 'Server-side pull request merging is disabled.'
110 110 in response) != pr_merge_enabled
111 111
112 112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
113 113 # Logout
114 114 response = self.app.post(
115 115 h.route_path('logout'),
116 116 params={'csrf_token': csrf_token})
117 117 # Login as regular user
118 118 response = self.app.post(h.route_path('login'),
119 119 {'username': TEST_USER_REGULAR_LOGIN,
120 120 'password': 'test12'})
121 121
122 122 pull_request = pr_util.create_pull_request(
123 123 author=TEST_USER_REGULAR_LOGIN)
124 124
125 125 response = self.app.get(route_path(
126 126 'pullrequest_show',
127 127 repo_name=pull_request.target_repo.scm_instance().name,
128 128 pull_request_id=pull_request.pull_request_id))
129 129
130 130 response.mustcontain('Server-side pull request merging is disabled.')
131 131
132 132 assert_response = response.assert_response()
133 133 # for regular user without a merge permissions, we don't see it
134 134 assert_response.no_element_exists('#close-pull-request-action')
135 135
136 136 user_util.grant_user_permission_to_repo(
137 137 pull_request.target_repo,
138 138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
139 139 'repository.write')
140 140 response = self.app.get(route_path(
141 141 'pullrequest_show',
142 142 repo_name=pull_request.target_repo.scm_instance().name,
143 143 pull_request_id=pull_request.pull_request_id))
144 144
145 145 response.mustcontain('Server-side pull request merging is disabled.')
146 146
147 147 assert_response = response.assert_response()
148 148 # now regular user has a merge permissions, we have CLOSE button
149 149 assert_response.one_element_exists('#close-pull-request-action')
150 150
151 151 def test_show_invalid_commit_id(self, pr_util):
152 152 # Simulating invalid revisions which will cause a lookup error
153 153 pull_request = pr_util.create_pull_request()
154 154 pull_request.revisions = ['invalid']
155 155 Session().add(pull_request)
156 156 Session().commit()
157 157
158 158 response = self.app.get(route_path(
159 159 'pullrequest_show',
160 160 repo_name=pull_request.target_repo.scm_instance().name,
161 161 pull_request_id=pull_request.pull_request_id))
162 162
163 163 for commit_id in pull_request.revisions:
164 164 response.mustcontain(commit_id)
165 165
166 166 def test_show_invalid_source_reference(self, pr_util):
167 167 pull_request = pr_util.create_pull_request()
168 168 pull_request.source_ref = 'branch:b:invalid'
169 169 Session().add(pull_request)
170 170 Session().commit()
171 171
172 172 self.app.get(route_path(
173 173 'pullrequest_show',
174 174 repo_name=pull_request.target_repo.scm_instance().name,
175 175 pull_request_id=pull_request.pull_request_id))
176 176
177 177 def test_edit_title_description(self, pr_util, csrf_token):
178 178 pull_request = pr_util.create_pull_request()
179 179 pull_request_id = pull_request.pull_request_id
180 180
181 181 response = self.app.post(
182 182 route_path('pullrequest_update',
183 183 repo_name=pull_request.target_repo.repo_name,
184 184 pull_request_id=pull_request_id),
185 185 params={
186 186 'edit_pull_request': 'true',
187 187 'title': 'New title',
188 188 'description': 'New description',
189 189 'csrf_token': csrf_token})
190 190
191 191 assert_session_flash(
192 192 response, u'Pull request title & description updated.',
193 193 category='success')
194 194
195 195 pull_request = PullRequest.get(pull_request_id)
196 196 assert pull_request.title == 'New title'
197 197 assert pull_request.description == 'New description'
198 198
199 199 def test_edit_title_description_closed(self, pr_util, csrf_token):
200 200 pull_request = pr_util.create_pull_request()
201 201 pull_request_id = pull_request.pull_request_id
202 202 repo_name = pull_request.target_repo.repo_name
203 203 pr_util.close()
204 204
205 205 response = self.app.post(
206 206 route_path('pullrequest_update',
207 207 repo_name=repo_name, pull_request_id=pull_request_id),
208 208 params={
209 209 'edit_pull_request': 'true',
210 210 'title': 'New title',
211 211 'description': 'New description',
212 212 'csrf_token': csrf_token}, status=200)
213 213 assert_session_flash(
214 214 response, u'Cannot update closed pull requests.',
215 215 category='error')
216 216
217 217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
218 218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
219 219
220 220 pull_request = pr_util.create_pull_request()
221 221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
222 222 Session().add(pull_request)
223 223 Session().commit()
224 224
225 225 pull_request_id = pull_request.pull_request_id
226 226
227 227 response = self.app.post(
228 228 route_path('pullrequest_update',
229 229 repo_name=pull_request.target_repo.repo_name,
230 230 pull_request_id=pull_request_id),
231 231 params={'update_commits': 'true',
232 232 'csrf_token': csrf_token})
233 233
234 234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
235 235 UpdateFailureReason.MISSING_SOURCE_REF])
236 236 assert_session_flash(response, expected_msg, category='error')
237 237
238 238 def test_missing_target_reference(self, pr_util, csrf_token):
239 239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
240 240 pull_request = pr_util.create_pull_request(
241 241 approved=True, mergeable=True)
242 242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
243 243 Session().add(pull_request)
244 244 Session().commit()
245 245
246 246 pull_request_id = pull_request.pull_request_id
247 247 pull_request_url = route_path(
248 248 'pullrequest_show',
249 249 repo_name=pull_request.target_repo.repo_name,
250 250 pull_request_id=pull_request_id)
251 251
252 252 response = self.app.get(pull_request_url)
253 253
254 254 assertr = AssertResponse(response)
255 255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
256 256 MergeFailureReason.MISSING_TARGET_REF]
257 257 assertr.element_contains(
258 258 'span[data-role="merge-message"]', str(expected_msg))
259 259
260 260 def test_comment_and_close_pull_request_custom_message_approved(
261 261 self, pr_util, csrf_token, xhr_header):
262 262
263 263 pull_request = pr_util.create_pull_request(approved=True)
264 264 pull_request_id = pull_request.pull_request_id
265 265 author = pull_request.user_id
266 266 repo = pull_request.target_repo.repo_id
267 267
268 268 self.app.post(
269 269 route_path('pullrequest_comment_create',
270 270 repo_name=pull_request.target_repo.scm_instance().name,
271 271 pull_request_id=pull_request_id),
272 272 params={
273 273 'close_pull_request': '1',
274 274 'text': 'Closing a PR',
275 275 'csrf_token': csrf_token},
276 276 extra_environ=xhr_header,)
277 277
278 278 journal = UserLog.query()\
279 279 .filter(UserLog.user_id == author)\
280 280 .filter(UserLog.repository_id == repo) \
281 281 .order_by('user_log_id') \
282 282 .all()
283 283 assert journal[-1].action == 'repo.pull_request.close'
284 284
285 285 pull_request = PullRequest.get(pull_request_id)
286 286 assert pull_request.is_closed()
287 287
288 288 status = ChangesetStatusModel().get_status(
289 289 pull_request.source_repo, pull_request=pull_request)
290 290 assert status == ChangesetStatus.STATUS_APPROVED
291 291 comments = ChangesetComment().query() \
292 292 .filter(ChangesetComment.pull_request == pull_request) \
293 293 .order_by(ChangesetComment.comment_id.asc())\
294 294 .all()
295 295 assert comments[-1].text == 'Closing a PR'
296 296
297 297 def test_comment_force_close_pull_request_rejected(
298 298 self, pr_util, csrf_token, xhr_header):
299 299 pull_request = pr_util.create_pull_request()
300 300 pull_request_id = pull_request.pull_request_id
301 301 PullRequestModel().update_reviewers(
302 pull_request_id, [(1, ['reason'], False), (2, ['reason2'], False)],
302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
303 303 pull_request.author)
304 304 author = pull_request.user_id
305 305 repo = pull_request.target_repo.repo_id
306 306
307 307 self.app.post(
308 308 route_path('pullrequest_comment_create',
309 309 repo_name=pull_request.target_repo.scm_instance().name,
310 310 pull_request_id=pull_request_id),
311 311 params={
312 312 'close_pull_request': '1',
313 313 'csrf_token': csrf_token},
314 314 extra_environ=xhr_header)
315 315
316 316 pull_request = PullRequest.get(pull_request_id)
317 317
318 318 journal = UserLog.query()\
319 319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
320 320 .order_by('user_log_id') \
321 321 .all()
322 322 assert journal[-1].action == 'repo.pull_request.close'
323 323
324 324 # check only the latest status, not the review status
325 325 status = ChangesetStatusModel().get_status(
326 326 pull_request.source_repo, pull_request=pull_request)
327 327 assert status == ChangesetStatus.STATUS_REJECTED
328 328
329 329 def test_comment_and_close_pull_request(
330 330 self, pr_util, csrf_token, xhr_header):
331 331 pull_request = pr_util.create_pull_request()
332 332 pull_request_id = pull_request.pull_request_id
333 333
334 334 response = self.app.post(
335 335 route_path('pullrequest_comment_create',
336 336 repo_name=pull_request.target_repo.scm_instance().name,
337 337 pull_request_id=pull_request.pull_request_id),
338 338 params={
339 339 'close_pull_request': 'true',
340 340 'csrf_token': csrf_token},
341 341 extra_environ=xhr_header)
342 342
343 343 assert response.json
344 344
345 345 pull_request = PullRequest.get(pull_request_id)
346 346 assert pull_request.is_closed()
347 347
348 348 # check only the latest status, not the review status
349 349 status = ChangesetStatusModel().get_status(
350 350 pull_request.source_repo, pull_request=pull_request)
351 351 assert status == ChangesetStatus.STATUS_REJECTED
352 352
353 353 def test_create_pull_request(self, backend, csrf_token):
354 354 commits = [
355 355 {'message': 'ancestor'},
356 356 {'message': 'change'},
357 357 {'message': 'change2'},
358 358 ]
359 359 commit_ids = backend.create_master_repo(commits)
360 360 target = backend.create_repo(heads=['ancestor'])
361 361 source = backend.create_repo(heads=['change2'])
362 362
363 363 response = self.app.post(
364 364 route_path('pullrequest_create', repo_name=source.repo_name),
365 365 [
366 366 ('source_repo', source.repo_name),
367 367 ('source_ref', 'branch:default:' + commit_ids['change2']),
368 368 ('target_repo', target.repo_name),
369 369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
370 370 ('common_ancestor', commit_ids['ancestor']),
371 371 ('pullrequest_desc', 'Description'),
372 372 ('pullrequest_title', 'Title'),
373 373 ('__start__', 'review_members:sequence'),
374 374 ('__start__', 'reviewer:mapping'),
375 375 ('user_id', '1'),
376 376 ('__start__', 'reasons:sequence'),
377 377 ('reason', 'Some reason'),
378 378 ('__end__', 'reasons:sequence'),
379 ('__start__', 'rules:sequence'),
380 ('__end__', 'rules:sequence'),
379 381 ('mandatory', 'False'),
380 382 ('__end__', 'reviewer:mapping'),
381 383 ('__end__', 'review_members:sequence'),
382 384 ('__start__', 'revisions:sequence'),
383 385 ('revisions', commit_ids['change']),
384 386 ('revisions', commit_ids['change2']),
385 387 ('__end__', 'revisions:sequence'),
386 388 ('user', ''),
387 389 ('csrf_token', csrf_token),
388 390 ],
389 391 status=302)
390 392
391 393 location = response.headers['Location']
392 394 pull_request_id = location.rsplit('/', 1)[1]
393 395 assert pull_request_id != 'new'
394 396 pull_request = PullRequest.get(int(pull_request_id))
395 397
396 398 # check that we have now both revisions
397 399 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
398 400 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
399 401 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
400 402 assert pull_request.target_ref == expected_target_ref
401 403
402 404 def test_reviewer_notifications(self, backend, csrf_token):
403 405 # We have to use the app.post for this test so it will create the
404 406 # notifications properly with the new PR
405 407 commits = [
406 408 {'message': 'ancestor',
407 409 'added': [FileNode('file_A', content='content_of_ancestor')]},
408 410 {'message': 'change',
409 411 'added': [FileNode('file_a', content='content_of_change')]},
410 412 {'message': 'change-child'},
411 413 {'message': 'ancestor-child', 'parents': ['ancestor'],
412 414 'added': [
413 415 FileNode('file_B', content='content_of_ancestor_child')]},
414 416 {'message': 'ancestor-child-2'},
415 417 ]
416 418 commit_ids = backend.create_master_repo(commits)
417 419 target = backend.create_repo(heads=['ancestor-child'])
418 420 source = backend.create_repo(heads=['change'])
419 421
420 422 response = self.app.post(
421 423 route_path('pullrequest_create', repo_name=source.repo_name),
422 424 [
423 425 ('source_repo', source.repo_name),
424 426 ('source_ref', 'branch:default:' + commit_ids['change']),
425 427 ('target_repo', target.repo_name),
426 428 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
427 429 ('common_ancestor', commit_ids['ancestor']),
428 430 ('pullrequest_desc', 'Description'),
429 431 ('pullrequest_title', 'Title'),
430 432 ('__start__', 'review_members:sequence'),
431 433 ('__start__', 'reviewer:mapping'),
432 434 ('user_id', '2'),
433 435 ('__start__', 'reasons:sequence'),
434 436 ('reason', 'Some reason'),
435 437 ('__end__', 'reasons:sequence'),
438 ('__start__', 'rules:sequence'),
439 ('__end__', 'rules:sequence'),
436 440 ('mandatory', 'False'),
437 441 ('__end__', 'reviewer:mapping'),
438 442 ('__end__', 'review_members:sequence'),
439 443 ('__start__', 'revisions:sequence'),
440 444 ('revisions', commit_ids['change']),
441 445 ('__end__', 'revisions:sequence'),
442 446 ('user', ''),
443 447 ('csrf_token', csrf_token),
444 448 ],
445 449 status=302)
446 450
447 451 location = response.headers['Location']
448 452
449 453 pull_request_id = location.rsplit('/', 1)[1]
450 454 assert pull_request_id != 'new'
451 455 pull_request = PullRequest.get(int(pull_request_id))
452 456
453 457 # Check that a notification was made
454 458 notifications = Notification.query()\
455 459 .filter(Notification.created_by == pull_request.author.user_id,
456 460 Notification.type_ == Notification.TYPE_PULL_REQUEST,
457 461 Notification.subject.contains(
458 462 "wants you to review pull request #%s" % pull_request_id))
459 463 assert len(notifications.all()) == 1
460 464
461 465 # Change reviewers and check that a notification was made
462 466 PullRequestModel().update_reviewers(
463 pull_request.pull_request_id, [(1, [], False)],
467 pull_request.pull_request_id, [(1, [], False, [])],
464 468 pull_request.author)
465 469 assert len(notifications.all()) == 2
466 470
467 471 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
468 472 csrf_token):
469 473 commits = [
470 474 {'message': 'ancestor',
471 475 'added': [FileNode('file_A', content='content_of_ancestor')]},
472 476 {'message': 'change',
473 477 'added': [FileNode('file_a', content='content_of_change')]},
474 478 {'message': 'change-child'},
475 479 {'message': 'ancestor-child', 'parents': ['ancestor'],
476 480 'added': [
477 481 FileNode('file_B', content='content_of_ancestor_child')]},
478 482 {'message': 'ancestor-child-2'},
479 483 ]
480 484 commit_ids = backend.create_master_repo(commits)
481 485 target = backend.create_repo(heads=['ancestor-child'])
482 486 source = backend.create_repo(heads=['change'])
483 487
484 488 response = self.app.post(
485 489 route_path('pullrequest_create', repo_name=source.repo_name),
486 490 [
487 491 ('source_repo', source.repo_name),
488 492 ('source_ref', 'branch:default:' + commit_ids['change']),
489 493 ('target_repo', target.repo_name),
490 494 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
491 495 ('common_ancestor', commit_ids['ancestor']),
492 496 ('pullrequest_desc', 'Description'),
493 497 ('pullrequest_title', 'Title'),
494 498 ('__start__', 'review_members:sequence'),
495 499 ('__start__', 'reviewer:mapping'),
496 500 ('user_id', '1'),
497 501 ('__start__', 'reasons:sequence'),
498 502 ('reason', 'Some reason'),
499 503 ('__end__', 'reasons:sequence'),
504 ('__start__', 'rules:sequence'),
505 ('__end__', 'rules:sequence'),
500 506 ('mandatory', 'False'),
501 507 ('__end__', 'reviewer:mapping'),
502 508 ('__end__', 'review_members:sequence'),
503 509 ('__start__', 'revisions:sequence'),
504 510 ('revisions', commit_ids['change']),
505 511 ('__end__', 'revisions:sequence'),
506 512 ('user', ''),
507 513 ('csrf_token', csrf_token),
508 514 ],
509 515 status=302)
510 516
511 517 location = response.headers['Location']
512 518
513 519 pull_request_id = location.rsplit('/', 1)[1]
514 520 assert pull_request_id != 'new'
515 521 pull_request = PullRequest.get(int(pull_request_id))
516 522
517 523 # target_ref has to point to the ancestor's commit_id in order to
518 524 # show the correct diff
519 525 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
520 526 assert pull_request.target_ref == expected_target_ref
521 527
522 528 # Check generated diff contents
523 529 response = response.follow()
524 530 assert 'content_of_ancestor' not in response.body
525 531 assert 'content_of_ancestor-child' not in response.body
526 532 assert 'content_of_change' in response.body
527 533
528 534 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
529 535 # Clear any previous calls to rcextensions
530 536 rhodecode.EXTENSIONS.calls.clear()
531 537
532 538 pull_request = pr_util.create_pull_request(
533 539 approved=True, mergeable=True)
534 540 pull_request_id = pull_request.pull_request_id
535 541 repo_name = pull_request.target_repo.scm_instance().name,
536 542
537 543 response = self.app.post(
538 544 route_path('pullrequest_merge',
539 545 repo_name=str(repo_name[0]),
540 546 pull_request_id=pull_request_id),
541 547 params={'csrf_token': csrf_token}).follow()
542 548
543 549 pull_request = PullRequest.get(pull_request_id)
544 550
545 551 assert response.status_int == 200
546 552 assert pull_request.is_closed()
547 553 assert_pull_request_status(
548 554 pull_request, ChangesetStatus.STATUS_APPROVED)
549 555
550 556 # Check the relevant log entries were added
551 557 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
552 558 actions = [log.action for log in user_logs]
553 559 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
554 560 expected_actions = [
555 561 u'repo.pull_request.close',
556 562 u'repo.pull_request.merge',
557 563 u'repo.pull_request.comment.create'
558 564 ]
559 565 assert actions == expected_actions
560 566
561 567 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
562 568 actions = [log for log in user_logs]
563 569 assert actions[-1].action == 'user.push'
564 570 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
565 571
566 572 # Check post_push rcextension was really executed
567 573 push_calls = rhodecode.EXTENSIONS.calls['post_push']
568 574 assert len(push_calls) == 1
569 575 unused_last_call_args, last_call_kwargs = push_calls[0]
570 576 assert last_call_kwargs['action'] == 'push'
571 577 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
572 578
573 579 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
574 580 pull_request = pr_util.create_pull_request(mergeable=False)
575 581 pull_request_id = pull_request.pull_request_id
576 582 pull_request = PullRequest.get(pull_request_id)
577 583
578 584 response = self.app.post(
579 585 route_path('pullrequest_merge',
580 586 repo_name=pull_request.target_repo.scm_instance().name,
581 587 pull_request_id=pull_request.pull_request_id),
582 588 params={'csrf_token': csrf_token}).follow()
583 589
584 590 assert response.status_int == 200
585 591 response.mustcontain(
586 592 'Merge is not currently possible because of below failed checks.')
587 593 response.mustcontain('Server-side pull request merging is disabled.')
588 594
589 595 @pytest.mark.skip_backends('svn')
590 596 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
591 597 pull_request = pr_util.create_pull_request(mergeable=True)
592 598 pull_request_id = pull_request.pull_request_id
593 599 repo_name = pull_request.target_repo.scm_instance().name
594 600
595 601 response = self.app.post(
596 602 route_path('pullrequest_merge',
597 603 repo_name=repo_name,
598 604 pull_request_id=pull_request_id),
599 605 params={'csrf_token': csrf_token}).follow()
600 606
601 607 assert response.status_int == 200
602 608
603 609 response.mustcontain(
604 610 'Merge is not currently possible because of below failed checks.')
605 611 response.mustcontain('Pull request reviewer approval is pending.')
606 612
607 613 def test_merge_pull_request_renders_failure_reason(
608 614 self, user_regular, csrf_token, pr_util):
609 615 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
610 616 pull_request_id = pull_request.pull_request_id
611 617 repo_name = pull_request.target_repo.scm_instance().name
612 618
613 619 model_patcher = mock.patch.multiple(
614 620 PullRequestModel,
615 621 merge=mock.Mock(return_value=MergeResponse(
616 622 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
617 623 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
618 624
619 625 with model_patcher:
620 626 response = self.app.post(
621 627 route_path('pullrequest_merge',
622 628 repo_name=repo_name,
623 629 pull_request_id=pull_request_id),
624 630 params={'csrf_token': csrf_token}, status=302)
625 631
626 632 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
627 633 MergeFailureReason.PUSH_FAILED])
628 634
629 635 def test_update_source_revision(self, backend, csrf_token):
630 636 commits = [
631 637 {'message': 'ancestor'},
632 638 {'message': 'change'},
633 639 {'message': 'change-2'},
634 640 ]
635 641 commit_ids = backend.create_master_repo(commits)
636 642 target = backend.create_repo(heads=['ancestor'])
637 643 source = backend.create_repo(heads=['change'])
638 644
639 645 # create pr from a in source to A in target
640 646 pull_request = PullRequest()
641 647 pull_request.source_repo = source
642 648 # TODO: johbo: Make sure that we write the source ref this way!
643 649 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
644 650 branch=backend.default_branch_name, commit_id=commit_ids['change'])
645 651 pull_request.target_repo = target
646 652
647 653 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
648 654 branch=backend.default_branch_name,
649 655 commit_id=commit_ids['ancestor'])
650 656 pull_request.revisions = [commit_ids['change']]
651 657 pull_request.title = u"Test"
652 658 pull_request.description = u"Description"
653 659 pull_request.author = UserModel().get_by_username(
654 660 TEST_USER_ADMIN_LOGIN)
655 661 Session().add(pull_request)
656 662 Session().commit()
657 663 pull_request_id = pull_request.pull_request_id
658 664
659 665 # source has ancestor - change - change-2
660 666 backend.pull_heads(source, heads=['change-2'])
661 667
662 668 # update PR
663 669 self.app.post(
664 670 route_path('pullrequest_update',
665 671 repo_name=target.repo_name,
666 672 pull_request_id=pull_request_id),
667 673 params={'update_commits': 'true',
668 674 'csrf_token': csrf_token})
669 675
670 676 # check that we have now both revisions
671 677 pull_request = PullRequest.get(pull_request_id)
672 678 assert pull_request.revisions == [
673 679 commit_ids['change-2'], commit_ids['change']]
674 680
675 681 # TODO: johbo: this should be a test on its own
676 682 response = self.app.get(route_path(
677 683 'pullrequest_new',
678 684 repo_name=target.repo_name))
679 685 assert response.status_int == 200
680 686 assert 'Pull request updated to' in response.body
681 687 assert 'with 1 added, 0 removed commits.' in response.body
682 688
683 689 def test_update_target_revision(self, backend, csrf_token):
684 690 commits = [
685 691 {'message': 'ancestor'},
686 692 {'message': 'change'},
687 693 {'message': 'ancestor-new', 'parents': ['ancestor']},
688 694 {'message': 'change-rebased'},
689 695 ]
690 696 commit_ids = backend.create_master_repo(commits)
691 697 target = backend.create_repo(heads=['ancestor'])
692 698 source = backend.create_repo(heads=['change'])
693 699
694 700 # create pr from a in source to A in target
695 701 pull_request = PullRequest()
696 702 pull_request.source_repo = source
697 703 # TODO: johbo: Make sure that we write the source ref this way!
698 704 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
699 705 branch=backend.default_branch_name, commit_id=commit_ids['change'])
700 706 pull_request.target_repo = target
701 707 # TODO: johbo: Target ref should be branch based, since tip can jump
702 708 # from branch to branch
703 709 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
704 710 branch=backend.default_branch_name,
705 711 commit_id=commit_ids['ancestor'])
706 712 pull_request.revisions = [commit_ids['change']]
707 713 pull_request.title = u"Test"
708 714 pull_request.description = u"Description"
709 715 pull_request.author = UserModel().get_by_username(
710 716 TEST_USER_ADMIN_LOGIN)
711 717 Session().add(pull_request)
712 718 Session().commit()
713 719 pull_request_id = pull_request.pull_request_id
714 720
715 721 # target has ancestor - ancestor-new
716 722 # source has ancestor - ancestor-new - change-rebased
717 723 backend.pull_heads(target, heads=['ancestor-new'])
718 724 backend.pull_heads(source, heads=['change-rebased'])
719 725
720 726 # update PR
721 727 self.app.post(
722 728 route_path('pullrequest_update',
723 729 repo_name=target.repo_name,
724 730 pull_request_id=pull_request_id),
725 731 params={'update_commits': 'true',
726 732 'csrf_token': csrf_token},
727 733 status=200)
728 734
729 735 # check that we have now both revisions
730 736 pull_request = PullRequest.get(pull_request_id)
731 737 assert pull_request.revisions == [commit_ids['change-rebased']]
732 738 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
733 739 branch=backend.default_branch_name,
734 740 commit_id=commit_ids['ancestor-new'])
735 741
736 742 # TODO: johbo: This should be a test on its own
737 743 response = self.app.get(route_path(
738 744 'pullrequest_new',
739 745 repo_name=target.repo_name))
740 746 assert response.status_int == 200
741 747 assert 'Pull request updated to' in response.body
742 748 assert 'with 1 added, 1 removed commits.' in response.body
743 749
744 750 def test_update_of_ancestor_reference(self, backend, csrf_token):
745 751 commits = [
746 752 {'message': 'ancestor'},
747 753 {'message': 'change'},
748 754 {'message': 'change-2'},
749 755 {'message': 'ancestor-new', 'parents': ['ancestor']},
750 756 {'message': 'change-rebased'},
751 757 ]
752 758 commit_ids = backend.create_master_repo(commits)
753 759 target = backend.create_repo(heads=['ancestor'])
754 760 source = backend.create_repo(heads=['change'])
755 761
756 762 # create pr from a in source to A in target
757 763 pull_request = PullRequest()
758 764 pull_request.source_repo = source
759 765 # TODO: johbo: Make sure that we write the source ref this way!
760 766 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
761 767 branch=backend.default_branch_name,
762 768 commit_id=commit_ids['change'])
763 769 pull_request.target_repo = target
764 770 # TODO: johbo: Target ref should be branch based, since tip can jump
765 771 # from branch to branch
766 772 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
767 773 branch=backend.default_branch_name,
768 774 commit_id=commit_ids['ancestor'])
769 775 pull_request.revisions = [commit_ids['change']]
770 776 pull_request.title = u"Test"
771 777 pull_request.description = u"Description"
772 778 pull_request.author = UserModel().get_by_username(
773 779 TEST_USER_ADMIN_LOGIN)
774 780 Session().add(pull_request)
775 781 Session().commit()
776 782 pull_request_id = pull_request.pull_request_id
777 783
778 784 # target has ancestor - ancestor-new
779 785 # source has ancestor - ancestor-new - change-rebased
780 786 backend.pull_heads(target, heads=['ancestor-new'])
781 787 backend.pull_heads(source, heads=['change-rebased'])
782 788
783 789 # update PR
784 790 self.app.post(
785 791 route_path('pullrequest_update',
786 792 repo_name=target.repo_name,
787 793 pull_request_id=pull_request_id),
788 794 params={'update_commits': 'true',
789 795 'csrf_token': csrf_token},
790 796 status=200)
791 797
792 798 # Expect the target reference to be updated correctly
793 799 pull_request = PullRequest.get(pull_request_id)
794 800 assert pull_request.revisions == [commit_ids['change-rebased']]
795 801 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
796 802 branch=backend.default_branch_name,
797 803 commit_id=commit_ids['ancestor-new'])
798 804 assert pull_request.target_ref == expected_target_ref
799 805
800 806 def test_remove_pull_request_branch(self, backend_git, csrf_token):
801 807 branch_name = 'development'
802 808 commits = [
803 809 {'message': 'initial-commit'},
804 810 {'message': 'old-feature'},
805 811 {'message': 'new-feature', 'branch': branch_name},
806 812 ]
807 813 repo = backend_git.create_repo(commits)
808 814 commit_ids = backend_git.commit_ids
809 815
810 816 pull_request = PullRequest()
811 817 pull_request.source_repo = repo
812 818 pull_request.target_repo = repo
813 819 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
814 820 branch=branch_name, commit_id=commit_ids['new-feature'])
815 821 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
816 822 branch=backend_git.default_branch_name,
817 823 commit_id=commit_ids['old-feature'])
818 824 pull_request.revisions = [commit_ids['new-feature']]
819 825 pull_request.title = u"Test"
820 826 pull_request.description = u"Description"
821 827 pull_request.author = UserModel().get_by_username(
822 828 TEST_USER_ADMIN_LOGIN)
823 829 Session().add(pull_request)
824 830 Session().commit()
825 831
826 832 vcs = repo.scm_instance()
827 833 vcs.remove_ref('refs/heads/{}'.format(branch_name))
828 834
829 835 response = self.app.get(route_path(
830 836 'pullrequest_show',
831 837 repo_name=repo.repo_name,
832 838 pull_request_id=pull_request.pull_request_id))
833 839
834 840 assert response.status_int == 200
835 841 assert_response = AssertResponse(response)
836 842 assert_response.element_contains(
837 843 '#changeset_compare_view_content .alert strong',
838 844 'Missing commits')
839 845 assert_response.element_contains(
840 846 '#changeset_compare_view_content .alert',
841 847 'This pull request cannot be displayed, because one or more'
842 848 ' commits no longer exist in the source repository.')
843 849
844 850 def test_strip_commits_from_pull_request(
845 851 self, backend, pr_util, csrf_token):
846 852 commits = [
847 853 {'message': 'initial-commit'},
848 854 {'message': 'old-feature'},
849 855 {'message': 'new-feature', 'parents': ['initial-commit']},
850 856 ]
851 857 pull_request = pr_util.create_pull_request(
852 858 commits, target_head='initial-commit', source_head='new-feature',
853 859 revisions=['new-feature'])
854 860
855 861 vcs = pr_util.source_repository.scm_instance()
856 862 if backend.alias == 'git':
857 863 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
858 864 else:
859 865 vcs.strip(pr_util.commit_ids['new-feature'])
860 866
861 867 response = self.app.get(route_path(
862 868 'pullrequest_show',
863 869 repo_name=pr_util.target_repository.repo_name,
864 870 pull_request_id=pull_request.pull_request_id))
865 871
866 872 assert response.status_int == 200
867 873 assert_response = AssertResponse(response)
868 874 assert_response.element_contains(
869 875 '#changeset_compare_view_content .alert strong',
870 876 'Missing commits')
871 877 assert_response.element_contains(
872 878 '#changeset_compare_view_content .alert',
873 879 'This pull request cannot be displayed, because one or more'
874 880 ' commits no longer exist in the source repository.')
875 881 assert_response.element_contains(
876 882 '#update_commits',
877 883 'Update commits')
878 884
879 885 def test_strip_commits_and_update(
880 886 self, backend, pr_util, csrf_token):
881 887 commits = [
882 888 {'message': 'initial-commit'},
883 889 {'message': 'old-feature'},
884 890 {'message': 'new-feature', 'parents': ['old-feature']},
885 891 ]
886 892 pull_request = pr_util.create_pull_request(
887 893 commits, target_head='old-feature', source_head='new-feature',
888 894 revisions=['new-feature'], mergeable=True)
889 895
890 896 vcs = pr_util.source_repository.scm_instance()
891 897 if backend.alias == 'git':
892 898 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
893 899 else:
894 900 vcs.strip(pr_util.commit_ids['new-feature'])
895 901
896 902 response = self.app.post(
897 903 route_path('pullrequest_update',
898 904 repo_name=pull_request.target_repo.repo_name,
899 905 pull_request_id=pull_request.pull_request_id),
900 906 params={'update_commits': 'true',
901 907 'csrf_token': csrf_token})
902 908
903 909 assert response.status_int == 200
904 910 assert response.body == 'true'
905 911
906 912 # Make sure that after update, it won't raise 500 errors
907 913 response = self.app.get(route_path(
908 914 'pullrequest_show',
909 915 repo_name=pr_util.target_repository.repo_name,
910 916 pull_request_id=pull_request.pull_request_id))
911 917
912 918 assert response.status_int == 200
913 919 assert_response = AssertResponse(response)
914 920 assert_response.element_contains(
915 921 '#changeset_compare_view_content .alert strong',
916 922 'Missing commits')
917 923
918 924 def test_branch_is_a_link(self, pr_util):
919 925 pull_request = pr_util.create_pull_request()
920 926 pull_request.source_ref = 'branch:origin:1234567890abcdef'
921 927 pull_request.target_ref = 'branch:target:abcdef1234567890'
922 928 Session().add(pull_request)
923 929 Session().commit()
924 930
925 931 response = self.app.get(route_path(
926 932 'pullrequest_show',
927 933 repo_name=pull_request.target_repo.scm_instance().name,
928 934 pull_request_id=pull_request.pull_request_id))
929 935 assert response.status_int == 200
930 936 assert_response = AssertResponse(response)
931 937
932 938 origin = assert_response.get_element('.pr-origininfo .tag')
933 939 origin_children = origin.getchildren()
934 940 assert len(origin_children) == 1
935 941 target = assert_response.get_element('.pr-targetinfo .tag')
936 942 target_children = target.getchildren()
937 943 assert len(target_children) == 1
938 944
939 945 expected_origin_link = route_path(
940 946 'repo_changelog',
941 947 repo_name=pull_request.source_repo.scm_instance().name,
942 948 params=dict(branch='origin'))
943 949 expected_target_link = route_path(
944 950 'repo_changelog',
945 951 repo_name=pull_request.target_repo.scm_instance().name,
946 952 params=dict(branch='target'))
947 953 assert origin_children[0].attrib['href'] == expected_origin_link
948 954 assert origin_children[0].text == 'branch: origin'
949 955 assert target_children[0].attrib['href'] == expected_target_link
950 956 assert target_children[0].text == 'branch: target'
951 957
952 958 def test_bookmark_is_not_a_link(self, pr_util):
953 959 pull_request = pr_util.create_pull_request()
954 960 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
955 961 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
956 962 Session().add(pull_request)
957 963 Session().commit()
958 964
959 965 response = self.app.get(route_path(
960 966 'pullrequest_show',
961 967 repo_name=pull_request.target_repo.scm_instance().name,
962 968 pull_request_id=pull_request.pull_request_id))
963 969 assert response.status_int == 200
964 970 assert_response = AssertResponse(response)
965 971
966 972 origin = assert_response.get_element('.pr-origininfo .tag')
967 973 assert origin.text.strip() == 'bookmark: origin'
968 974 assert origin.getchildren() == []
969 975
970 976 target = assert_response.get_element('.pr-targetinfo .tag')
971 977 assert target.text.strip() == 'bookmark: target'
972 978 assert target.getchildren() == []
973 979
    def test_tag_is_not_a_link(self, pr_util):
        # Tag refs (unlike branch refs) have no changelog page to link to,
        # so the PR summary must render them as plain text, not anchors.
        pull_request = pr_util.create_pull_request()
        pull_request.source_ref = 'tag:origin:1234567890abcdef'
        pull_request.target_ref = 'tag:target:abcdef1234567890'
        Session().add(pull_request)
        Session().commit()

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))
        assert response.status_int == 200
        assert_response = AssertResponse(response)

        # no child elements means no <a> tag was rendered inside the badge
        origin = assert_response.get_element('.pr-origininfo .tag')
        assert origin.text.strip() == 'tag: origin'
        assert origin.getchildren() == []

        target = assert_response.get_element('.pr-targetinfo .tag')
        assert target.text.strip() == 'tag: target'
        assert target.getchildren() == []
995 1001
996 1002 @pytest.mark.parametrize('mergeable', [True, False])
997 1003 def test_shadow_repository_link(
998 1004 self, mergeable, pr_util, http_host_only_stub):
999 1005 """
1000 1006 Check that the pull request summary page displays a link to the shadow
1001 1007 repository if the pull request is mergeable. If it is not mergeable
1002 1008 the link should not be displayed.
1003 1009 """
1004 1010 pull_request = pr_util.create_pull_request(
1005 1011 mergeable=mergeable, enable_notifications=False)
1006 1012 target_repo = pull_request.target_repo.scm_instance()
1007 1013 pr_id = pull_request.pull_request_id
1008 1014 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1009 1015 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1010 1016
1011 1017 response = self.app.get(route_path(
1012 1018 'pullrequest_show',
1013 1019 repo_name=target_repo.name,
1014 1020 pull_request_id=pr_id))
1015 1021
1016 1022 assertr = AssertResponse(response)
1017 1023 if mergeable:
1018 1024 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1019 1025 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1020 1026 else:
1021 1027 assertr.no_element_exists('.pr-mergeinfo')
1022 1028
1023 1029
@pytest.mark.usefixtures('app')
@pytest.mark.backends("git", "hg")
class TestPullrequestsControllerDelete(object):
    """
    Permission matrix for the pull-request delete/edit buttons and the
    comment-deletion endpoint: admins and PR owners may delete, plain
    users may not, and write-permission users may edit but not delete.
    """

    def test_pull_request_delete_button_permissions_admin(
            self, autologin_user, user_admin, pr_util):
        # admin viewing any PR sees the delete button plus confirm text
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('id="delete_pullrequest"')
        response.mustcontain('Confirm to delete this pull request')

    def test_pull_request_delete_button_permissions_owner(
            self, autologin_regular_user, user_regular, pr_util):
        # the PR author sees the delete button on their own PR
        pull_request = pr_util.create_pull_request(
            author=user_regular.username, enable_notifications=False)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('id="delete_pullrequest"')
        response.mustcontain('Confirm to delete this pull request')

    def test_pull_request_delete_button_permissions_forbidden(
            self, autologin_regular_user, user_regular, user_admin, pr_util):
        # a regular user viewing someone else's PR gets no delete UI at all
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))
        response.mustcontain(no=['id="delete_pullrequest"'])
        response.mustcontain(no=['Confirm to delete this pull request'])

    def test_pull_request_delete_button_permissions_can_update_cannot_delete(
            self, autologin_regular_user, user_regular, user_admin, pr_util,
            user_util):
        # repository.write grants edit (and the button markup) but not the
        # delete confirmation flow
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        user_util.grant_user_permission_to_repo(
            pull_request.target_repo, user_regular,
            'repository.write')

        response = self.app.get(route_path(
            'pullrequest_show',
            repo_name=pull_request.target_repo.scm_instance().name,
            pull_request_id=pull_request.pull_request_id))

        response.mustcontain('id="open_edit_pullrequest"')
        response.mustcontain('id="delete_pullrequest"')
        response.mustcontain(no=['Confirm to delete this pull request'])

    def test_delete_comment_returns_404_if_comment_does_not_exist(
            self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
        # deleting a non-existent comment id must 404, not 500
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)

        self.app.post(
            route_path(
                'pullrequest_comment_delete',
                repo_name=pull_request.target_repo.scm_instance().name,
                pull_request_id=pull_request.pull_request_id,
                comment_id=1024404),
            extra_environ=xhr_header,
            params={'csrf_token': csrf_token},
            status=404
        )

    def test_delete_comment(
            self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
        # happy path: XHR delete of an existing comment returns 'true'
        pull_request = pr_util.create_pull_request(
            author=user_admin.username, enable_notifications=False)
        comment = pr_util.create_comment()
        comment_id = comment.comment_id

        response = self.app.post(
            route_path(
                'pullrequest_comment_delete',
                repo_name=pull_request.target_repo.scm_instance().name,
                pull_request_id=pull_request.pull_request_id,
                comment_id=comment_id),
            extra_environ=xhr_header,
            params={'csrf_token': csrf_token},
            status=200
        )
        assert response.body == 'true'
1121 1127
1122 1128
def assert_pull_request_status(pull_request, expected_status):
    # Test helper: recompute the aggregated review status for the given
    # pull request and assert it equals `expected_status`.
    status = ChangesetStatusModel().calculated_review_status(
        pull_request=pull_request)
    assert status == expected_status
1127 1133
1128 1134
@pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
@pytest.mark.usefixtures("autologin_user")
# NOTE(review): "forbidde" looks like a typo for "forbidden"; the name is
# the public pytest id, so it is left unchanged here.
def test_forbidde_to_repo_summary_for_svn_repositories(backend_svn, app, route):
    # SVN repositories do not support pull requests; both PR routes must 404
    response = app.get(
        route_path(route, repo_name=backend_svn.repo_name), status=404)
1134 1140
@@ -1,76 +1,79 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 from rhodecode.lib import helpers as h
22 22 from rhodecode.lib.utils2 import safe_int
23 23
24 24
def reviewer_as_json(user, reasons=None, mandatory=False, rules=None, user_group=None):
    """
    Returns json struct of a reviewer for frontend

    :param user: the reviewer
    :param reasons: list of strings of why they are reviewers
    :param mandatory: bool, to set user as mandatory
    :param rules: list of review-rule identifiers that selected this reviewer
    :param user_group: optional user-group info when the reviewer was added
        via a group-based rule
    """

    return {
        'user_id': user.user_id,
        'reasons': reasons or [],
        'rules': rules or [],
        'mandatory': mandatory,
        'user_group': user_group,
        'username': user.username,
        'first_name': user.first_name,
        'last_name': user.last_name,
        # pre-rendered bits for the frontend widget
        'user_link': h.link_to_user(user),
        'gravatar_link': h.gravatar_url(user.email, 14),
    }
44 47
def get_default_reviewers_data(
        current_user, source_repo, source_commit, target_repo, target_commit):

    """ Return json for default reviewers of a repository """

    default_reviewer = reviewer_as_json(
        user=current_user,
        reasons=['Default reviewer', 'Repository owner'],
        mandatory=False)

    return {
        # schema version, kept so the structure can be upgraded later
        'api_ver': 'v1',
        'reviewers': [default_reviewer],
        'rules': {},
        'rules_data': {},
    }
60 63
61 64
def validate_default_reviewers(review_members, reviewer_rules):
    """
    Function to validate submitted reviewers against the saved rules

    :param review_members: iterable of dicts with at least ``user_id``;
        ``reasons``, ``mandatory`` and ``rules`` are optional.
    :param reviewer_rules: saved rules to validate against (currently
        unused; kept for interface compatibility).
    :return: list of ``(user_id, reasons, mandatory, rules)`` tuples
    """
    reviewers = []
    for member in review_members:
        reviewer_user_id = safe_int(member['user_id'])
        # tolerate payloads that omit the optional keys (``rules`` is a
        # recent addition older clients do not send)
        entry = (
            reviewer_user_id,
            member.get('reasons') or [],
            member.get('mandatory', False),
            member.get('rules') or [],
        )
        reviewers.append(entry)

    return reviewers
@@ -1,2072 +1,2077 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helper functions
23 23
24 24 Consists of functions to typically be used within templates, but also
25 25 available to Controllers. This module is available to both as 'h'.
26 26 """
27 27
28 28 import random
29 29 import hashlib
30 30 import StringIO
31 31 import urllib
32 32 import math
33 33 import logging
34 34 import re
35 35 import urlparse
36 36 import time
37 37 import string
38 38 import hashlib
39 39 from collections import OrderedDict
40 40
41 41 import pygments
42 42 import itertools
43 43 import fnmatch
44 44
45 45 from datetime import datetime
46 46 from functools import partial
47 47 from pygments.formatters.html import HtmlFormatter
48 48 from pygments import highlight as code_highlight
49 49 from pygments.lexers import (
50 50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
51 51
52 52 from pyramid.threadlocal import get_current_request
53 53
54 54 from webhelpers.html import literal, HTML, escape
55 55 from webhelpers.html.tools import *
56 56 from webhelpers.html.builder import make_tag
57 57 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
58 58 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
59 59 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
60 60 submit, text, password, textarea, title, ul, xml_declaration, radio
61 61 from webhelpers.html.tools import auto_link, button_to, highlight, \
62 62 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
63 63 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
64 64 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
65 65 replace_whitespace, urlify, truncate, wrap_paragraphs
66 66 from webhelpers.date import time_ago_in_words
67 67 from webhelpers.paginate import Page as _Page
68 68 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
69 69 convert_boolean_attrs, NotGiven, _make_safe_id_component
70 70 from webhelpers2.number import format_byte_size
71 71
72 72 from rhodecode.lib.action_parser import action_parser
73 73 from rhodecode.lib.ext_json import json
74 74 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
75 75 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
76 76 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
77 77 AttributeDict, safe_int, md5, md5_safe
78 78 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
79 79 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
80 80 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
81 81 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
82 82 from rhodecode.model.changeset_status import ChangesetStatusModel
83 83 from rhodecode.model.db import Permission, User, Repository
84 84 from rhodecode.model.repo_group import RepoGroupModel
85 85 from rhodecode.model.settings import IssueTrackerSettingsModel
86 86
87 87 log = logging.getLogger(__name__)
88 88
89 89
90 90 DEFAULT_USER = User.DEFAULT_USER
91 91 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
92 92
93 93
def asset(path, ver=None, **kwargs):
    """
    Helper to generate a static asset file path for rhodecode assets

    eg. h.asset('images/image.png', ver='3923')

    :param path: path of asset
    :param ver: optional version query param to append as ?ver=
    :param kwargs: extra query parameters to include in the URL
    """
    request = get_current_request()
    query = {}
    query.update(kwargs)
    if ver:
        # merge rather than replace, so kwargs-supplied query params
        # survive alongside the version stamp
        query['ver'] = ver
    return request.static_path(
        'rhodecode:public/{}'.format(path), _query=query)
110 110
111 111
# mapping of HTML-significant characters to their entity references,
# keyed by code point for use with str.translate
default_html_escape_table = {
    ord(char): entity for char, entity in [
        ('&', u'&amp;'),
        ('<', u'&lt;'),
        ('>', u'&gt;'),
        ('"', u'&quot;'),
        ("'", u'&#39;'),
    ]
}


def html_escape(text, html_escape_table=default_html_escape_table):
    """Produce entities within text."""
    return text.translate(html_escape_table)
124 124
125 125
def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
    """
    Truncate string ``s`` at the first occurrence of ``sub``.

    If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
    When ``suffix_if_chopped`` is given, it is appended whenever
    non-whitespace text was actually cut off.
    """
    cut_at = s.find(sub)
    if cut_at == -1:
        # separator not present: nothing to chop
        return s

    if inclusive:
        cut_at += len(sub)

    head = s[:cut_at]
    remainder = s[cut_at:].strip()

    if remainder and suffix_if_chopped:
        head += suffix_if_chopped

    return head
147 147
148 148
def shorter(text, size=20, postfix='...'):
    """
    Truncate ``text`` to at most ``size`` characters, replacing the tail
    with ``postfix`` when truncation happens.

    :param text: string to shorten
    :param size: maximum length of the result (default 20)
    :param postfix: marker appended on truncation (default ``'...'``,
        parameterized for callers that need a different indicator)
    """
    if len(text) > size:
        return text[:size - len(postfix)] + postfix
    return text
154 154
155 155
156 156 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
157 157 """
158 158 Reset button
159 159 """
160 160 _set_input_attrs(attrs, type, name, value)
161 161 _set_id_attr(attrs, id, name)
162 162 convert_boolean_attrs(attrs, ["disabled"])
163 163 return HTML.input(**attrs)
164 164
165 165 reset = _reset
166 166 safeid = _make_safe_id_component
167 167
168 168
def branding(name, length=40):
    # Trim the branding/site name to `length` chars with no ellipsis
    # indicator (webhelpers truncate with empty indicator).
    return truncate(name, length, indicator="")
171 171
172 172
def FID(raw_id, path):
    """
    Creates a unique ID for filenode based on it's hash of path and commit
    it's safe to use in urls

    :param raw_id: commit raw id
    :param path: file path inside the repository
    """
    commit_part = short_id(raw_id)
    path_part = md5_safe(path)[:12]
    return 'c-{}-{}'.format(commit_part, path_part)
183 183
184 184
185 185 class _GetError(object):
186 186 """Get error from form_errors, and represent it as span wrapped error
187 187 message
188 188
189 189 :param field_name: field to fetch errors for
190 190 :param form_errors: form errors dict
191 191 """
192 192
193 193 def __call__(self, field_name, form_errors):
194 194 tmpl = """<span class="error_msg">%s</span>"""
195 195 if form_errors and field_name in form_errors:
196 196 return literal(tmpl % form_errors.get(field_name))
197 197
198 198 get_error = _GetError()
199 199
200 200
class _ToolTip(object):
    """Callable that escapes tooltip text for safe embedding in HTML."""

    def __call__(self, tooltip_title, trim_at=50):
        """
        Special function just to wrap our text into nice formatted
        autowrapped text

        :param tooltip_title: raw tooltip text to escape
        :param trim_at: unused here; kept for interface compatibility
        """
        tooltip_title = escape(tooltip_title)
        # NOTE(review): escape() presumably already converts angle brackets,
        # which would make these replaces no-ops -- confirm before removing
        tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
        return tooltip_title
tooltip = _ToolTip()
214 214
215 215
216 216 def files_breadcrumbs(repo_name, commit_id, file_path):
217 217 if isinstance(file_path, str):
218 218 file_path = safe_unicode(file_path)
219 219
220 220 # TODO: johbo: Is this always a url like path, or is this operating
221 221 # system dependent?
222 222 path_segments = file_path.split('/')
223 223
224 224 repo_name_html = escape(repo_name)
225 225 if len(path_segments) == 1 and path_segments[0] == '':
226 226 url_segments = [repo_name_html]
227 227 else:
228 228 url_segments = [
229 229 link_to(
230 230 repo_name_html,
231 231 route_path(
232 232 'repo_files',
233 233 repo_name=repo_name,
234 234 commit_id=commit_id,
235 235 f_path=''),
236 236 class_='pjax-link')]
237 237
238 238 last_cnt = len(path_segments) - 1
239 239 for cnt, segment in enumerate(path_segments):
240 240 if not segment:
241 241 continue
242 242 segment_html = escape(segment)
243 243
244 244 if cnt != last_cnt:
245 245 url_segments.append(
246 246 link_to(
247 247 segment_html,
248 248 route_path(
249 249 'repo_files',
250 250 repo_name=repo_name,
251 251 commit_id=commit_id,
252 252 f_path='/'.join(path_segments[:cnt + 1])),
253 253 class_='pjax-link'))
254 254 else:
255 255 url_segments.append(segment_html)
256 256
257 257 return literal('/'.join(url_segments))
258 258
259 259
260 260 class CodeHtmlFormatter(HtmlFormatter):
261 261 """
262 262 My code Html Formatter for source codes
263 263 """
264 264
265 265 def wrap(self, source, outfile):
266 266 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
267 267
268 268 def _wrap_code(self, source):
269 269 for cnt, it in enumerate(source):
270 270 i, t = it
271 271 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
272 272 yield i, t
273 273
274 274 def _wrap_tablelinenos(self, inner):
275 275 dummyoutfile = StringIO.StringIO()
276 276 lncount = 0
277 277 for t, line in inner:
278 278 if t:
279 279 lncount += 1
280 280 dummyoutfile.write(line)
281 281
282 282 fl = self.linenostart
283 283 mw = len(str(lncount + fl - 1))
284 284 sp = self.linenospecial
285 285 st = self.linenostep
286 286 la = self.lineanchors
287 287 aln = self.anchorlinenos
288 288 nocls = self.noclasses
289 289 if sp:
290 290 lines = []
291 291
292 292 for i in range(fl, fl + lncount):
293 293 if i % st == 0:
294 294 if i % sp == 0:
295 295 if aln:
296 296 lines.append('<a href="#%s%d" class="special">%*d</a>' %
297 297 (la, i, mw, i))
298 298 else:
299 299 lines.append('<span class="special">%*d</span>' % (mw, i))
300 300 else:
301 301 if aln:
302 302 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
303 303 else:
304 304 lines.append('%*d' % (mw, i))
305 305 else:
306 306 lines.append('')
307 307 ls = '\n'.join(lines)
308 308 else:
309 309 lines = []
310 310 for i in range(fl, fl + lncount):
311 311 if i % st == 0:
312 312 if aln:
313 313 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
314 314 else:
315 315 lines.append('%*d' % (mw, i))
316 316 else:
317 317 lines.append('')
318 318 ls = '\n'.join(lines)
319 319
320 320 # in case you wonder about the seemingly redundant <div> here: since the
321 321 # content in the other cell also is wrapped in a div, some browsers in
322 322 # some configurations seem to mess up the formatting...
323 323 if nocls:
324 324 yield 0, ('<table class="%stable">' % self.cssclass +
325 325 '<tr><td><div class="linenodiv" '
326 326 'style="background-color: #f0f0f0; padding-right: 10px">'
327 327 '<pre style="line-height: 125%">' +
328 328 ls + '</pre></div></td><td id="hlcode" class="code">')
329 329 else:
330 330 yield 0, ('<table class="%stable">' % self.cssclass +
331 331 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
332 332 ls + '</pre></div></td><td id="hlcode" class="code">')
333 333 yield 0, dummyoutfile.getvalue()
334 334 yield 0, '</td></tr></table>'
335 335
336 336
class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
    """Pygments HTML formatter that renders only the lines listed in
    ``only_line_numbers``, inserting a '...' spacer row between
    non-adjacent matches and linking each shown line number to ``url``."""

    def __init__(self, **kw):
        # only show these line numbers if set
        self.only_lines = kw.pop('only_line_numbers', [])
        self.query_terms = kw.pop('query_terms', [])
        self.max_lines = kw.pop('max_lines', 5)
        self.line_context = kw.pop('line_context', 3)
        self.url = kw.pop('url', None)

        # NOTE(review): this passes the *parent* class to super(), so
        # CodeHtmlFormatter.__init__ is skipped entirely -- confirm that
        # is intentional and not a typo for SearchContentCodeHtmlFormatter
        super(CodeHtmlFormatter, self).__init__(**kw)

    def _wrap_code(self, source):
        # wrap every emitted source line in its own <pre> element
        for cnt, it in enumerate(source):
            i, t = it
            t = '<pre>%s</pre>' % t
            yield i, t

    def _wrap_tablelinenos(self, inner):
        """Emit table rows only for lines whose 1-based number is in
        ``self.only_lines``; other source lines are counted but dropped."""
        yield 0, '<table class="code-highlight %stable">' % self.cssclass

        last_shown_line_number = 0
        current_line_number = 1

        for t, line in inner:
            if not t:
                # t == 0 marks non-source chunks; pass them through unchanged
                yield t, line
                continue

            if current_line_number in self.only_lines:
                # separate non-consecutive matches with a '...' spacer row
                if last_shown_line_number + 1 != current_line_number:
                    yield 0, '<tr>'
                    yield 0, '<td class="line">...</td>'
                    yield 0, '<td id="hlcode" class="code"></td>'
                    yield 0, '</tr>'

                yield 0, '<tr>'
                if self.url:
                    yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
                        self.url, current_line_number, current_line_number)
                else:
                    yield 0, '<td class="line"><a href="">%i</a></td>' % (
                        current_line_number)
                yield 0, '<td id="hlcode" class="code">' + line + '</td>'
                yield 0, '</tr>'

                last_shown_line_number = current_line_number

            current_line_number += 1


        yield 0, '</table>'
388 388
389 389
def extract_phrases(text_query):
    """
    Split a search term string into phrases. Text wrapped in double
    quotes stays together as a single phrase; empty or all-whitespace
    fragments are discarded, eg.

    'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']

    """
    collected = []
    current = ''
    inside_quotes = False

    for char in text_query:
        if char == '"':
            # a quote always terminates the current fragment and toggles
            # quoted mode (opening or closing a phrase)
            collected.append(current)
            current = ''
            inside_quotes = not inside_quotes
        elif char == ' ' and not inside_quotes:
            # an unquoted space terminates the current fragment
            collected.append(current)
            current = ''
        else:
            current += char

    # flush whatever is left (also handles an unclosed quote)
    collected.append(current)

    return [fragment.strip() for fragment in collected if fragment.strip()]
429 429
430 430
def get_matching_offsets(text, phrases):
    """
    Returns a list of string offsets in `text` that the list of `terms` match

    >>> get_matching_offsets('some text here', ['some', 'here'])
    [(0, 4), (10, 14)]

    """
    offsets = []
    for phrase in phrases:
        # escape the phrase so regex metacharacters ('+', '*', '(', ...)
        # are matched literally; previously a phrase like 'a+b' was
        # interpreted as a regular expression and matched wrongly (or
        # raised on invalid patterns)
        for match in re.finditer(re.escape(phrase), text):
            offsets.append((match.start(), match.end()))

    return offsets
445 445
446 446
def normalize_text_for_matching(x):
    """
    Lower-case *x* and replace every non-alphanumeric character with a
    space, so two strings can be compared while ignoring punctuation.
    """
    # \W is equivalent to [^\w]
    return re.sub(r'\W', ' ', x.lower())
453 453
454 454
def get_matching_line_offsets(lines, terms):
    """Return a dict mapping the 1-based indices of `lines` that match a
    text search query to the list of (start, end) offsets of each match
    within the normalized line.

    :param lines: list of strings representing lines
    :param terms: search term string to match in lines eg. 'some text'
    eg.

    text = '''
    words words words
    words words words
    some text some
    words words words
    words words words
    text here what
    '''
    get_matching_line_offsets(text, 'text')
    {3: [(5, 9)], 6: [(0, 4)]]

    """
    phrases = [normalize_text_for_matching(phrase)
               for phrase in extract_phrases(terms)]

    per_line = (
        (index, get_matching_offsets(
            normalize_text_for_matching(content), phrases))
        for index, content in enumerate(lines, start=1))

    # keep only lines that actually had a match
    return dict((index, offsets) for index, offsets in per_line if offsets)
488 488
489 489
def hsv_to_rgb(h, s, v):
    """Convert hsv color values (each in [0, 1]) to an rgb tuple."""

    if s == 0.0:
        # zero saturation is a pure grey
        return v, v, v

    i = int(h * 6.0)  # XXX assume int() truncates!
    f = (h * 6.0) - i
    p = v * (1.0 - s)
    q = v * (1.0 - s * f)
    t = v * (1.0 - s * (1.0 - f))

    # one entry per sextant of the hue circle
    sextants = (
        (v, t, p),
        (q, v, p),
        (p, v, t),
        (p, q, v),
        (t, p, v),
        (v, p, q),
    )
    return sextants[i % 6]


def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
    """
    Generator for getting n of evenly distributed colors using
    hsv color and golden ratio. It always return same order of colors

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: list of 3 stringified RGB components, eg. ['11', '22', '33']
    """

    golden_ratio = 0.618033988749895
    # fixed starting hue keeps the sequence deterministic across calls
    h = 0.22717784590367374

    # `range` instead of py2-only `xrange`; as a plain loop counter the
    # behavior is identical and the code also runs on py3
    for _ in range(n):
        h += golden_ratio
        h %= 1
        rgb = hsv_to_rgb(h, saturation, lightness)
        # materialize as a list of stringified components (matches the
        # py2 `map(...)` result this replaces)
        yield [str(int(channel * 256)) for channel in rgb]
535 535
536 536
def color_hasher(n=10000, saturation=0.10, lightness=0.95):
    """
    Returns a function which when called with an argument returns a unique
    color for that argument, eg.

    :param n: number of colors to generate
    :param saturation: saturation of returned colors
    :param lightness: lightness of returned colors
    :returns: css RGB string

    >>> color_hash = color_hasher()
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('hello')
    'rgb(34, 12, 59)'
    >>> color_hash('other')
    'rgb(90, 224, 159)'
    """

    color_dict = {}
    # pass `n` through -- previously it was accepted but silently ignored,
    # so the generator always used its own default
    cgenerator = unique_color_generator(
        n=n, saturation=saturation, lightness=lightness)

    def get_color_string(thing):
        # memoize so the same input always maps to the same color
        if thing in color_dict:
            col = color_dict[thing]
        else:
            # builtin next() works on both py2.6+ and py3
            # (generator.next() was removed in py3)
            col = color_dict[thing] = next(cgenerator)
        return "rgb(%s)" % (', '.join(col))

    return get_color_string
568 568
569 569
def get_lexer_safe(mimetype=None, filepath=None):
    """
    Tries to return a relevant pygments lexer using mimetype/filepath name,
    defaulting to plain text if none could be found
    """
    lexer = None
    try:
        if mimetype:
            lexer = get_lexer_for_mimetype(mimetype)
        if not lexer:
            lexer = get_lexer_for_filename(filepath)
    except pygments.util.ClassNotFound:
        pass

    # fall back to plain text when nothing matched
    return lexer or get_lexer_by_name('text')
588 588
589 589
def get_lexer_for_filenode(filenode):
    """Return a custom lexer registered for the node's extension, or fall
    back to the node's own lexer."""
    return get_custom_lexer(filenode.extension) or filenode.lexer
593 593
594 594
def pygmentize(filenode, **kwargs):
    """
    Render the content of *filenode* as highlighted HTML markup.

    :param filenode: file node whose ``content`` is highlighted
    :param kwargs: forwarded to ``CodeHtmlFormatter``
    """
    lexer = get_lexer_for_filenode(filenode)
    formatter = CodeHtmlFormatter(**kwargs)
    return literal(code_highlight(filenode.content, lexer, formatter))
604 604
605 605
def is_following_repo(repo_name, user_id):
    """Check via ScmModel whether user *user_id* follows *repo_name*."""
    from rhodecode.model.scm import ScmModel
    scm = ScmModel()
    return scm.is_following_repo(repo_name, user_id)
609 609
610 610
611 611 class _Message(object):
612 612 """A message returned by ``Flash.pop_messages()``.
613 613
614 614 Converting the message to a string returns the message text. Instances
615 615 also have the following attributes:
616 616
617 617 * ``message``: the message text.
618 618 * ``category``: the category specified when the message was created.
619 619 """
620 620
621 621 def __init__(self, category, message):
622 622 self.category = category
623 623 self.message = message
624 624
625 625 def __str__(self):
626 626 return self.message
627 627
628 628 __unicode__ = __str__
629 629
630 630 def __html__(self):
631 631 return escape(safe_unicode(self.message))
632 632
633 633
class Flash(object):
    """Session-backed flash-message helper bridging legacy pylons-style
    messages (stored under ``session_key``) and pyramid's per-category
    flash queues."""

    # List of allowed categories. If None, allow any category.
    categories = ["warning", "notice", "error", "success"]

    # Default category if none is specified.
    default_category = "notice"

    def __init__(self, session_key="flash", categories=None,
                 default_category=None):
        """
        Instantiate a ``Flash`` object.

        ``session_key`` is the key to save the messages under in the user's
        session.

        ``categories`` is an optional list which overrides the default list
        of categories.

        ``default_category`` overrides the default category used for messages
        when none is specified.
        """
        self.session_key = session_key
        if categories is not None:
            self.categories = categories
        if default_category is not None:
            self.default_category = default_category
        if self.categories and self.default_category not in self.categories:
            raise ValueError(
                "unrecognized default category %r" % (self.default_category,))

    def pop_messages(self, session=None, request=None):
        """
        Return all accumulated messages and delete them from the session.

        The return value is a list of ``Message`` objects.
        """
        messages = []

        # resolve the session from the (current) request when not given
        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        # Pop the 'old' pylons flash messages. They are tuples of the form
        # (category, message)
        for cat, msg in session.pop(self.session_key, []):
            messages.append(_Message(cat, msg))

        # Pop the 'new' pyramid flash messages for each category as list
        # of strings.
        for cat in self.categories:
            for msg in session.pop_flash(queue=cat):
                messages.append(_Message(cat, msg))
        # Map messages from the default queue to the 'notice' category.
        for msg in session.pop_flash():
            messages.append(_Message('notice', msg))

        session.save()
        return messages

    def json_alerts(self, session=None, request=None):
        """Pop all pending messages and return them as a JSON list of
        alert payloads; a message body of ``text|DELIM|{json}`` carries
        extra data in the trailing JSON part (``subdata``)."""
        payloads = []
        # NOTE(review): uses the module-level `flash` singleton instead of
        # `self` -- confirm this is intended (matters only if another
        # Flash instance ever calls json_alerts)
        messages = flash.pop_messages(session=session, request=request)
        if messages:
            for message in messages:
                subdata = {}
                if hasattr(message.message, 'rsplit'):
                    flash_data = message.message.rsplit('|DELIM|', 1)
                    org_message = flash_data[0]
                    if len(flash_data) > 1:
                        subdata = json.loads(flash_data[1])
                else:
                    org_message = message.message
                payloads.append({
                    'message': {
                        'message': u'{}'.format(org_message),
                        'level': message.category,
                        'force': True,
                        'subdata': subdata
                    }
                })
        return json.dumps(payloads)

    def __call__(self, message, category=None, ignore_duplicate=False,
                 session=None, request=None):
        """Queue *message* into the session's flash queue for *category*."""

        if not session:
            if not request:
                request = get_current_request()
            session = request.session

        session.flash(
            message, queue=category, allow_duplicate=not ignore_duplicate)
727 727
728 728
729 729 flash = Flash()
730 730
731 731 #==============================================================================
732 732 # SCM FILTERS available via h.
733 733 #==============================================================================
734 734 from rhodecode.lib.vcs.utils import author_name, author_email
735 735 from rhodecode.lib.utils2 import credentials_filter, age as _age
736 736 from rhodecode.model.db import User, ChangesetStatus
737 737
# thin aliases over library helpers, exposed via the `h` namespace
age = _age
capitalize = lambda x: x.capitalize()
email = author_email
# first 12 characters of a commit hash
short_id = lambda x: x[:12]
# run `credentials_filter` over the value and re-join the result
hide_credentials = lambda x: ''.join(credentials_filter(x))
743 743
744 744
def age_component(datetime_iso, value=None, time_is_local=False):
    """Render a <time class="timeago"> element for *datetime_iso*,
    appending an explicit UTC offset when the value carries no timezone
    information."""
    title = value or format_date(datetime_iso)

    # naive datetimes get an offset appended: the server's current local
    # offset when `time_is_local`, otherwise UTC
    tzinfo = '+00:00'
    if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
        if time_is_local:
            local_offset = (datetime.now() - datetime.utcnow()).seconds + 1
            tzinfo = time.strftime("+%H:%M", time.gmtime(local_offset))

    markup = (
        '<time class="timeago tooltip" '
        'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
            datetime_iso, title, tzinfo))
    return literal(markup)
762 762
763 763
def _shorten_commit_id(commit_id):
    """Truncate *commit_id* to the configured sha length (default 12)."""
    from rhodecode import CONFIG
    length = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
    return commit_id[:length]
768 768
769 769
def show_id(commit):
    """
    Configurable function that shows ID
    by default it's r123:fffeeefffeee

    :param commit: commit instance
    """
    from rhodecode import CONFIG
    show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))

    raw_id = _shorten_commit_id(commit.raw_id)
    if not show_idx:
        return '%s' % (raw_id, )
    return 'r%s:%s' % (commit.idx, raw_id)
785 785
786 786
def format_date(date):
    """
    use a standardized formatting for dates used in RhodeCode

    :param date: date/datetime object
    :return: formatted date, empty unicode string for falsy input
    """
    if not date:
        return u""
    return safe_unicode(date.strftime("%a, %d %b %Y %H:%M:%S"))
800 800
801 801
802 802 class _RepoChecker(object):
803 803
804 804 def __init__(self, backend_alias):
805 805 self._backend_alias = backend_alias
806 806
807 807 def __call__(self, repository):
808 808 if hasattr(repository, 'alias'):
809 809 _type = repository.alias
810 810 elif hasattr(repository, 'repo_type'):
811 811 _type = repository.repo_type
812 812 else:
813 813 _type = repository
814 814 return _type == self._backend_alias
815 815
# ready-made backend checkers, eg. h.is_git(repo)
is_git = _RepoChecker('git')
is_hg = _RepoChecker('hg')
is_svn = _RepoChecker('svn')
819 819
820 820
def get_repo_type_by_name(repo_name):
    """Return the ``repo_type`` of the repository named *repo_name*."""
    repository = Repository.get_by_repo_name(repo_name)
    return repository.repo_type
824 824
825 825
def is_svn_without_proxy(repository):
    """Return True when *repository* is Subversion and the svn HTTP proxy
    is disabled in the vcs settings."""
    if not is_svn(repository):
        return False
    from rhodecode.model.settings import VcsSettingsModel
    conf = VcsSettingsModel().get_ui_settings_as_config_obj()
    return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
832 832
833 833
def discover_user(author):
    """
    Tries to discover a RhodeCode User based on the author string. The
    author string is typically `FirstName LastName <email@address.com>`.
    Returns None when no user matches.
    """

    # if author is already an instance use it for extraction
    if isinstance(author, User):
        return author

    # valid email in the attribute passed, see if they're in the system
    email_part = author_email(author)
    if email_part != '':
        by_email = User.get_by_email(
            email_part, case_insensitive=True, cache=True)
        if by_email is not None:
            return by_email

    # maybe it's a username: extract it and fetch by username
    name_part = author_name(author)
    return User.get_by_username(name_part, case_insensitive=True, cache=True)
858 858
859 859
def email_or_none(author):
    """Return an email for *author*: the one embedded in the commit string
    if present, otherwise the email of a matching known user, else None."""
    commit_email = author_email(author)
    if commit_email != '':
        return commit_email

    # no inline email; see if the author name resolves to a known user
    user = User.get_by_username(
        author_name(author), case_insensitive=True, cache=True)
    if user is not None:
        return user.email

    # no valid email, not a valid user in the system, none!
    return None
877 877
878 878
def link_to_user(author, length=0, **kwargs):
    """Render *author* as a link to the matching user's profile, or as
    escaped plain text when no user is known. *length* truncates the
    display name when non-zero."""
    user = discover_user(author)
    # user can be None, but if we have it already it means we can re-use it
    # in the person() function, so we save 1 intensive-query
    if user:
        author = user

    display_person = person(author, 'username_or_name_or_email')
    if length:
        display_person = shorter(display_person, length)

    if not user:
        return escape(display_person)
    return link_to(
        escape(display_person),
        route_path('user_profile', username=user.username),
        **kwargs)
897 897
898 898
def person(author, show_attr="username_and_name"):
    """Return *show_attr* of the RhodeCode user matching *author*, or fall
    back to the name/email parsed from the raw author string."""
    user = discover_user(author)
    if user:
        return getattr(user, show_attr)
    return author_name(author) or email(author)
907 907
908 908
def author_string(email):
    """Return a display string for *email*: 'First Last &lt;email&gt;' for
    a known user with a name, the plain email otherwise, or None for a
    falsy input."""
    if not email:
        return None
    user = User.get_by_email(email, case_insensitive=True, cache=True)
    if user and (user.first_name or user.last_name):
        return '%s %s &lt;%s&gt;' % (user.first_name, user.last_name, email)
    return email
922 922
923 923
def person_by_id(id_, show_attr="username_and_name"):
    """Resolve a numeric user id to *show_attr* of that user. Inputs that
    are not ids pass through unchanged; digit strings for unknown users
    are returned converted to int (matches historic behavior)."""
    if str(id_).isdigit() or isinstance(id_, int):
        # keep the int conversion visible to the fall-through return
        id_ = int(id_)
        user = User.get(id_)
        if user is not None:
            return getattr(user, show_attr)
    return id_
935 935
936 936
def gravatar_with_user(request, author, show_disabled=False):
    """Render the 'gravatar_with_user' partial from base.mako for
    *author*."""
    renderer = request.get_partial_renderer(
        'rhodecode:templates/base/base.mako')
    return renderer('gravatar_with_user', author, show_disabled=show_disabled)
941 941
942 942
# Ordered mapping: tag name -> (compiled pattern, replacement html).
# The patterns match against already HTML-escaped text (hence `\&gt;`).
# Order matters: `extract_metatags`/`style_metatag` apply them in
# sequence, so the 'generic' catch-all must stay last.
tags_paterns = OrderedDict((
    ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
              '<div class="metatag" tag="lang">\\2</div>')),

    ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
             '<div class="metatag" tag="see">see: \\1 </div>')),

    ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
             '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),

    ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
                 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),

    ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
             '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),

    ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
               '<div class="metatag" tag="state \\1">\\1</div>')),

    # label in grey
    ('label', (re.compile(r'\[([a-z]+)\]'),
               '<div class="metatag" tag="label">\\1</div>')),

    # generic catch all in grey
    ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
                 '<div class="metatag" tag="generic">\\1</div>')),
))
970 970
971 971
def extract_metatags(value):
    """
    Extract supported meta-tags from the given text value.

    :returns: tuple of (list of (tag_name, matched_text) pairs,
        the input with all matched tags stripped out)
    """
    tags = []
    if not value:
        return tags, ''

    for key, (pat, _replace_html) in tags_paterns.items():
        tags.extend((key, match.group()) for match in pat.finditer(value))
        # strip the matched tags from the remaining text
        value = pat.sub('', value)

    return tags, value
986 986
987 987
def style_metatag(tag_type, value):
    """
    Convert a metatag *value* of *tag_type* into its html representation;
    unknown tag types and falsy values pass through unchanged/empty.
    """
    if not value:
        return ''

    tag_data = tags_paterns.get(tag_type)
    if not tag_data:
        return value

    pat, replace_html = tag_data
    # substitute on plain `unicode` instead of a markup tag, since the
    # regex expressions don't work on markup; safe_unicode doesn't work here
    return pat.sub(replace_html, unicode(value))
1004 1004
1005 1005
def bool2icon(value):
    """
    Render the boolean interpretation of *value* as an <i> html element
    carrying an icon class.

    :param value: any value; only its truthiness is used
    """
    css_class = "icon-true" if value else "icon-false"
    return HTML.tag('i', class_=css_class)
1018 1018
1019 1019
1020 1020 #==============================================================================
1021 1021 # PERMS
1022 1022 #==============================================================================
1023 1023 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
1024 1024 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
1025 1025 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
1026 1026 csrf_token_key
1027 1027
1028 1028
1029 1029 #==============================================================================
1030 1030 # GRAVATAR URL
1031 1031 #==============================================================================
1032 1032 class InitialsGravatar(object):
1033 1033 def __init__(self, email_address, first_name, last_name, size=30,
1034 1034 background=None, text_color='#fff'):
1035 1035 self.size = size
1036 1036 self.first_name = first_name
1037 1037 self.last_name = last_name
1038 1038 self.email_address = email_address
1039 1039 self.background = background or self.str2color(email_address)
1040 1040 self.text_color = text_color
1041 1041
1042 1042 def get_color_bank(self):
1043 1043 """
1044 1044 returns a predefined list of colors that gravatars can use.
1045 1045 Those are randomized distinct colors that guarantee readability and
1046 1046 uniqueness.
1047 1047
1048 1048 generated with: http://phrogz.net/css/distinct-colors.html
1049 1049 """
1050 1050 return [
1051 1051 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1052 1052 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1053 1053 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1054 1054 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1055 1055 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1056 1056 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1057 1057 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1058 1058 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1059 1059 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1060 1060 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1061 1061 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1062 1062 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1063 1063 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1064 1064 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1065 1065 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1066 1066 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1067 1067 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1068 1068 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1069 1069 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1070 1070 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1071 1071 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1072 1072 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1073 1073 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1074 1074 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1075 1075 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1076 1076 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1077 1077 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1078 1078 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1079 1079 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1080 1080 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1081 1081 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1082 1082 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1083 1083 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1084 1084 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1085 1085 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1086 1086 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1087 1087 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1088 1088 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1089 1089 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1090 1090 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1091 1091 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1092 1092 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1093 1093 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1094 1094 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1095 1095 '#4f8c46', '#368dd9', '#5c0073'
1096 1096 ]
1097 1097
1098 1098 def rgb_to_hex_color(self, rgb_tuple):
1099 1099 """
1100 1100 Converts an rgb_tuple passed to an hex color.
1101 1101
1102 1102 :param rgb_tuple: tuple with 3 ints represents rgb color space
1103 1103 """
1104 1104 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1105 1105
1106 1106 def email_to_int_list(self, email_str):
1107 1107 """
1108 1108 Get every byte of the hex digest value of email and turn it to integer.
1109 1109 It's going to be always between 0-255
1110 1110 """
1111 1111 digest = md5_safe(email_str.lower())
1112 1112 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1113 1113
1114 1114 def pick_color_bank_index(self, email_str, color_bank):
1115 1115 return self.email_to_int_list(email_str)[0] % len(color_bank)
1116 1116
1117 1117 def str2color(self, email_str):
1118 1118 """
1119 1119 Tries to map in a stable algorithm an email to color
1120 1120
1121 1121 :param email_str:
1122 1122 """
1123 1123 color_bank = self.get_color_bank()
1124 1124 # pick position (module it's length so we always find it in the
1125 1125 # bank even if it's smaller than 256 values
1126 1126 pos = self.pick_color_bank_index(email_str, color_bank)
1127 1127 return color_bank[pos]
1128 1128
1129 1129 def normalize_email(self, email_address):
1130 1130 import unicodedata
1131 1131 # default host used to fill in the fake/missing email
1132 1132 default_host = u'localhost'
1133 1133
1134 1134 if not email_address:
1135 1135 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1136 1136
1137 1137 email_address = safe_unicode(email_address)
1138 1138
1139 1139 if u'@' not in email_address:
1140 1140 email_address = u'%s@%s' % (email_address, default_host)
1141 1141
1142 1142 if email_address.endswith(u'@'):
1143 1143 email_address = u'%s%s' % (email_address, default_host)
1144 1144
1145 1145 email_address = unicodedata.normalize('NFKD', email_address)\
1146 1146 .encode('ascii', 'ignore')
1147 1147 return email_address
1148 1148
1149 1149 def get_initials(self):
1150 1150 """
1151 1151 Returns 2 letter initials calculated based on the input.
1152 1152 The algorithm picks first given email address, and takes first letter
1153 1153 of part before @, and then the first letter of server name. In case
1154 1154 the part before @ is in a format of `somestring.somestring2` it replaces
1155 1155 the server letter with first letter of somestring2
1156 1156
1157 1157 In case function was initialized with both first and lastname, this
1158 1158 overrides the extraction from email by first letter of the first and
1159 1159 last name. We add special logic to that functionality, In case Full name
1160 1160 is compound, like Guido Von Rossum, we use last part of the last name
1161 1161 (Von Rossum) picking `R`.
1162 1162
1163 1163 Function also normalizes the non-ascii characters to they ascii
1164 1164 representation, eg Ą => A
1165 1165 """
1166 1166 import unicodedata
1167 1167 # replace non-ascii to ascii
1168 1168 first_name = unicodedata.normalize(
1169 1169 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1170 1170 last_name = unicodedata.normalize(
1171 1171 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1172 1172
1173 1173 # do NFKD encoding, and also make sure email has proper format
1174 1174 email_address = self.normalize_email(self.email_address)
1175 1175
1176 1176 # first push the email initials
1177 1177 prefix, server = email_address.split('@', 1)
1178 1178
1179 1179 # check if prefix is maybe a 'first_name.last_name' syntax
1180 1180 _dot_split = prefix.rsplit('.', 1)
1181 1181 if len(_dot_split) == 2 and _dot_split[1]:
1182 1182 initials = [_dot_split[0][0], _dot_split[1][0]]
1183 1183 else:
1184 1184 initials = [prefix[0], server[0]]
1185 1185
1186 1186 # then try to replace either first_name or last_name
1187 1187 fn_letter = (first_name or " ")[0].strip()
1188 1188 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1189 1189
1190 1190 if fn_letter:
1191 1191 initials[0] = fn_letter
1192 1192
1193 1193 if ln_letter:
1194 1194 initials[1] = ln_letter
1195 1195
1196 1196 return ''.join(initials).upper()
1197 1197
1198 1198 def get_img_data_by_type(self, font_family, img_type):
1199 1199 default_user = """
1200 1200 <svg xmlns="http://www.w3.org/2000/svg"
1201 1201 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1202 1202 viewBox="-15 -10 439.165 429.164"
1203 1203
1204 1204 xml:space="preserve"
1205 1205 style="background:{background};" >
1206 1206
1207 1207 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1208 1208 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1209 1209 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1210 1210 168.596,153.916,216.671,
1211 1211 204.583,216.671z" fill="{text_color}"/>
1212 1212 <path d="M407.164,374.717L360.88,
1213 1213 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1214 1214 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1215 1215 15.366-44.203,23.488-69.076,23.488c-24.877,
1216 1216 0-48.762-8.122-69.078-23.488
1217 1217 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1218 1218 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1219 1219 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1220 1220 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1221 1221 19.402-10.527 C409.699,390.129,
1222 1222 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1223 1223 </svg>""".format(
1224 1224 size=self.size,
1225 1225 background='#979797', # @grey4
1226 1226 text_color=self.text_color,
1227 1227 font_family=font_family)
1228 1228
1229 1229 return {
1230 1230 "default_user": default_user
1231 1231 }[img_type]
1232 1232
    def get_img_data(self, svg_type=None):
        """
        generates the svg metadata for image

        :param svg_type: optional special image type (e.g. 'default_user');
            when given, delegates to :meth:`get_img_data_by_type` instead of
            rendering the initials avatar
        :return: svg markup as a string
        """

        # font stack kept in sync with the application stylesheet
        font_family = ','.join([
            'proximanovaregular',
            'Proxima Nova Regular',
            'Proxima Nova',
            'Arial',
            'Lucida Grande',
            'sans-serif'
        ])
        if svg_type:
            return self.get_img_data_by_type(font_family, svg_type)

        initials = self.get_initials()
        img_data = """
        <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
             width="{size}" height="{size}"
             style="width: 100%; height: 100%; background-color: {background}"
             viewBox="0 0 {size} {size}">
            <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
                  pointer-events="auto" fill="{text_color}"
                  font-family="{font_family}"
                  style="font-weight: 400; font-size: {f_size}px;">{text}
            </text>
        </svg>""".format(
            size=self.size,
            f_size=self.size/1.85, # scale the text inside the box nicely
            background=self.background,
            text_color=self.text_color,
            text=initials.upper(),
            font_family=font_family)

        return img_data
1269 1269
    def generate_svg(self, svg_type=None):
        """
        Return the avatar svg as a base64 ``data:`` URI.

        NOTE: ``str.encode('base64')`` is Python 2 only.
        """
        img_data = self.get_img_data(svg_type)
        return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1273 1273
1274 1274
def initials_gravatar(email_address, first_name, last_name, size=30):
    """
    Render an inline initials-based SVG avatar for the given identity.

    The anonymous/default user gets the special 'default_user' image.
    """
    is_default = email_address == User.DEFAULT_USER_EMAIL
    svg_type = 'default_user' if is_default else None
    renderer = InitialsGravatar(email_address, first_name, last_name, size)
    return renderer.generate_svg(svg_type=svg_type)
1281 1281
1282 1282
def gravatar_url(email_address, size=30, request=None):
    """
    Build an avatar URL for *email_address*.

    When gravatar usage is enabled in the instance settings, the configured
    gravatar URL template is expanded; otherwise (and always for an empty
    email or the default user) an inline initials-SVG data URI is returned.

    :param email_address: email to generate the avatar for
    :param size: avatar size in pixels
    :param request: optional pyramid request; falls back to the
        thread-local current request when not provided
    """
    # honor an explicitly passed request; previously the parameter was
    # silently overwritten by the thread-local request
    request = request or get_current_request()
    _use_gravatar = request.call_context.visual.use_gravatar
    _gravatar_url = request.call_context.visual.gravatar_url

    _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL

    email_address = email_address or User.DEFAULT_USER_EMAIL
    if isinstance(email_address, unicode):
        # hashlib crashes on unicode items
        email_address = safe_str(email_address)

    # empty email or default user
    if not email_address or email_address == User.DEFAULT_USER_EMAIL:
        return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)

    if _use_gravatar:
        # TODO: Disuse pyramid thread locals. Think about another solution to
        # get the host and schema here.
        tmpl = safe_str(_gravatar_url)
        tmpl = tmpl.replace('{email}', email_address)\
            .replace('{md5email}', md5_safe(email_address.lower())) \
            .replace('{netloc}', request.host)\
            .replace('{scheme}', request.scheme)\
            .replace('{size}', safe_str(size))
        return tmpl
    else:
        return initials_gravatar(email_address, '', '', size=size)
1312 1312
1313 1313
class Page(_Page):
    """
    Custom pager to match rendering style with paginator

    Overrides webhelpers' ``_Page`` navigation rendering so markup/classes
    match the application's styles.  NOTE: the page arithmetic below relies
    on Python 2 integer division (``/`` on ints truncates).
    """

    def _get_pos(self, cur_page, max_page, items):
        # Compute the window of page numbers (left..right) to display so
        # that roughly `items` entries are shown, clamped at both ends.
        edge = (items / 2) + 1
        if (cur_page <= edge):
            radius = max(items / 2, items - cur_page)
        elif (max_page - cur_page) < edge:
            radius = (items - 1) - (max_page - cur_page)
        else:
            radius = items / 2

        left = max(1, (cur_page - (radius)))
        right = min(max_page, cur_page + (radius))
        return left, cur_page, right

    def _range(self, regexp_match):
        """
        Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').

        Arguments:

        regexp_match
            A "re" (regular expressions) match object containing the
            radius of linked pages around the current page in
            regexp_match.group(1) as a string

        This function is supposed to be called as a callable in
        re.sub.

        """
        radius = int(regexp_match.group(1))

        # Compute the first and last page number within the radius
        # e.g. '1 .. 5 6 [7] 8 9 .. 12'
        # -> leftmost_page = 5
        # -> rightmost_page = 9
        leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
                                                            self.last_page,
                                                            (radius * 2) + 1)
        nav_items = []

        # Create a link to the first page (unless we are on the first page
        # or there would be no need to insert '..' spacers)
        if self.page != self.first_page and self.first_page < leftmost_page:
            nav_items.append(self._pagerlink(self.first_page, self.first_page))

        # Insert dots if there are pages between the first page
        # and the currently displayed page range
        if leftmost_page - self.first_page > 1:
            # Wrap in a SPAN tag if nolink_attr is set
            text = '..'
            if self.dotdot_attr:
                text = HTML.span(c=text, **self.dotdot_attr)
            nav_items.append(text)

        for thispage in xrange(leftmost_page, rightmost_page + 1):
            # Hilight the current page number and do not use a link
            if thispage == self.page:
                text = '%s' % (thispage,)
                # Wrap in a SPAN tag if nolink_attr is set
                if self.curpage_attr:
                    text = HTML.span(c=text, **self.curpage_attr)
                nav_items.append(text)
            # Otherwise create just a link to that page
            else:
                text = '%s' % (thispage,)
                nav_items.append(self._pagerlink(thispage, text))

        # Insert dots if there are pages between the displayed
        # page numbers and the end of the page range
        if self.last_page - rightmost_page > 1:
            text = '..'
            # Wrap in a SPAN tag if nolink_attr is set
            if self.dotdot_attr:
                text = HTML.span(c=text, **self.dotdot_attr)
            nav_items.append(text)

        # Create a link to the very last page (unless we are on the last
        # page or there would be no need to insert '..' spacers)
        if self.page != self.last_page and rightmost_page < self.last_page:
            nav_items.append(self._pagerlink(self.last_page, self.last_page))

        ## prerender links
        #_page_link = url.current()
        #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
        #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
        return self.separator.join(nav_items)

    # NOTE(review): mutable default arguments (link_attr/curpage_attr/
    # dotdot_attr dicts) are shared across calls; harmless here since they
    # are only read, but worth confirming before anyone mutates them.
    def pager(self, format='~2~', page_param='page', partial_param='partial',
              show_if_single_page=False, separator=' ', onclick=None,
              symbol_first='<<', symbol_last='>>',
              symbol_previous='<', symbol_next='>',
              link_attr={'class': 'pager_link', 'rel': 'prerender'},
              curpage_attr={'class': 'pager_curpage'},
              dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
        # Stash rendering options on the instance; _range() reads them
        # when re.sub() calls it back below.
        self.curpage_attr = curpage_attr
        self.separator = separator
        self.pager_kwargs = kwargs
        self.page_param = page_param
        self.partial_param = partial_param
        self.onclick = onclick
        self.link_attr = link_attr
        self.dotdot_attr = dotdot_attr

        # Don't show navigator if there is no more than one page
        if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
            return ''

        from string import Template
        # Replace ~...~ in token format by range of pages
        result = re.sub(r'~(\d+)~', self._range, format)

        # Interpolate '%' variables
        result = Template(result).safe_substitute({
            'first_page': self.first_page,
            'last_page': self.last_page,
            'page': self.page,
            'page_count': self.page_count,
            'items_per_page': self.items_per_page,
            'first_item': self.first_item,
            'last_item': self.last_item,
            'item_count': self.item_count,
            'link_first': self.page > self.first_page and \
                    self._pagerlink(self.first_page, symbol_first) or '',
            'link_last': self.page < self.last_page and \
                    self._pagerlink(self.last_page, symbol_last) or '',
            'link_previous': self.previous_page and \
                    self._pagerlink(self.previous_page, symbol_previous) \
                    or HTML.span(symbol_previous, class_="pg-previous disabled"),
            'link_next': self.next_page and \
                    self._pagerlink(self.next_page, symbol_next) \
                    or HTML.span(symbol_next, class_="pg-next disabled")
        })

        return literal(result)
1453 1453
1454 1454
1455 1455 #==============================================================================
1456 1456 # REPO PAGER, PAGER FOR REPOSITORY
1457 1457 #==============================================================================
class RepoPage(Page):
    """
    Pager specialized for repository changelogs: it slices the collection
    from the *end* so page 1 shows the newest items, and reverses each
    page's items before exposing them through the list interface.
    """

    def __init__(self, collection, page=1, items_per_page=20,
                 item_count=None, url=None, **kwargs):

        """Create a "RepoPage" instance. special pager for paging
        repository
        """
        self._url_generator = url

        # Safe the kwargs class-wide so they can be used in the pager() method
        self.kwargs = kwargs

        # Save a reference to the collection
        self.original_collection = collection

        self.collection = collection

        # The self.page is the number of the current page.
        # The first page has the number 1!
        try:
            self.page = int(page)  # make it int() if we get it as a string
        except (ValueError, TypeError):
            self.page = 1

        self.items_per_page = items_per_page

        # Unless the user tells us how many items the collections has
        # we calculate that ourselves.
        if item_count is not None:
            self.item_count = item_count
        else:
            self.item_count = len(self.collection)

        # Compute the number of the first and last available page
        if self.item_count > 0:
            self.first_page = 1
            self.page_count = int(math.ceil(float(self.item_count) /
                                            self.items_per_page))
            self.last_page = self.first_page + self.page_count - 1

            # Make sure that the requested page number is the range of
            # valid pages
            if self.page > self.last_page:
                self.page = self.last_page
            elif self.page < self.first_page:
                self.page = self.first_page

            # Note: the number of items on this page can be less than
            # items_per_page if the last page is not full
            # The slice indexes count from the end of the collection, so
            # page 1 maps to the newest items_per_page entries.
            self.first_item = max(0, (self.item_count) - (self.page *
                                                          items_per_page))
            self.last_item = ((self.item_count - 1) - items_per_page *
                              (self.page - 1))

            self.items = list(self.collection[self.first_item:self.last_item + 1])

            # Links to previous and next page
            if self.page > self.first_page:
                self.previous_page = self.page - 1
            else:
                self.previous_page = None

            if self.page < self.last_page:
                self.next_page = self.page + 1
            else:
                self.next_page = None

        # No items available
        else:
            self.first_page = None
            self.page_count = 0
            self.last_page = None
            self.first_item = None
            self.last_item = None
            self.previous_page = None
            self.next_page = None
            self.items = []

        # This is a subclass of the 'list' type. Initialise the list now.
        list.__init__(self, reversed(self.items))
1539 1539
1540 1540
def breadcrumb_repo_link(repo):
    """
    Makes a breadcrumbs path link to repo

    ex::
        group >> subgroup >> repo

    :param repo: a Repository instance
    """
    crumbs = []
    for group in repo.groups_with_parents:
        crumbs.append(link_to(
            group.name,
            route_path('repo_group_home', repo_group_name=group.group_name)))
    crumbs.append(link_to(
        repo.just_name,
        route_path('repo_summary', repo_name=repo.repo_name)))

    return literal(' &raquo; '.join(crumbs))
1559 1559
1560 1560
def format_byte_size_binary(file_size):
    """
    Formats file/folder sizes to standard (binary, i.e. KiB-style) units.

    ``None`` is treated as a size of zero.
    """
    size = 0 if file_size is None else file_size
    return format_byte_size(size, binary=True)
1570 1570
1571 1571
def urlify_text(text_, safe=True):
    """
    Turn every http(s) URL found in *text_* into an html anchor.

    :param text_: text to scan
    :param safe: when True, wrap the result in ``literal`` so the template
        engine does not escape the generated markup again
    """

    url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
                         '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')

    def _make_link(match_obj):
        url_full = match_obj.groups()[0]
        return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})

    linked = url_pat.sub(_make_link, text_)
    return literal(linked) if safe else linked
1589 1589
1590 1590
def urlify_commits(text_, repository):
    """
    Extract commit ids from text and make link from them

    :param text_:
    :param repository: repo name to build the URL with
    """

    commit_pat = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')

    def _make_link(match_obj):
        pref, commit_id, suf = match_obj.groups()
        tmpl = (
            '%(pref)s<a class="%(cls)s" href="%(url)s">'
            '%(commit_id)s</a>%(suf)s'
        )
        return tmpl % {
            'pref': pref,
            'cls': 'revision-link',
            'url': route_url('repo_commit', repo_name=repository,
                             commit_id=commit_id),
            'commit_id': commit_id,
            'suf': suf
        }

    return commit_pat.sub(_make_link, text_)
1622 1622
1623 1623
def _process_url_func(match_obj, repo_name, uid, entry,
                      return_raw_data=False, link_format='html'):
    """
    Render a single issue-tracker regex match as a link.

    :param match_obj: ``re`` match object produced by the entry's pattern
    :param repo_name: full repo name the text belongs to
    :param uid: id of the issue-tracker settings entry (unused here, kept
        for the ``partial`` call-sites in ``process_patterns``)
    :param entry: issue tracker settings dict with 'url'/'pref' keys
    :param return_raw_data: when True, return ``{'id':..., 'url':...}``
        instead of markup
    :param link_format: one of 'html', 'rst', 'markdown'
    :raises ValueError: for any other link_format
    """
    # keep a leading space if the pattern consumed one, so the
    # substitution does not glue the link to the previous word
    pref = ''
    if match_obj.group().startswith(' '):
        pref = ' '

    issue_id = ''.join(match_obj.groups())

    if link_format == 'html':
        tmpl = (
            '%(pref)s<a class="%(cls)s" href="%(url)s">'
            '%(issue-prefix)s%(id-repr)s'
            '</a>')
    elif link_format == 'rst':
        tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
    elif link_format == 'markdown':
        tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
    else:
        raise ValueError('Bad link_format:{}'.format(link_format))

    # NOTE(review): per _get_group_name_and_parent the first value appears
    # to be the bare repo name and the second its parent group -- confirm
    # against RepoGroupModel before relying on these names.
    (repo_name_cleaned,
     parent_group_name) = RepoGroupModel().\
        _get_group_name_and_parent(repo_name)

    # variables replacement
    named_vars = {
        'id': issue_id,
        'repo': repo_name,
        'repo_name': repo_name_cleaned,
        'group_name': parent_group_name
    }
    # named regex variables
    named_vars.update(match_obj.groupdict())
    _url = string.Template(entry['url']).safe_substitute(**named_vars)

    data = {
        'pref': pref,
        'cls': 'issue-tracker-link',
        'url': _url,
        'id-repr': issue_id,
        'issue-prefix': entry['pref'],
        'serv': entry['url'],
    }
    if return_raw_data:
        return {
            'id': issue_id,
            'url': _url
        }
    return tmpl % data
1673 1673
1674 1674
def get_active_pattern_entries(repo_name):
    """
    Fetch the active issue-tracker pattern settings for *repo_name*.

    An empty/None name yields the global (repo-less) settings.
    """
    repo = None
    if repo_name:
        # Retrieving repo_name to avoid invalid repo_name to explode on
        # IssueTrackerSettingsModel but still passing invalid name further down
        repo = Repository.get_by_repo_name(repo_name, cache=True)

    settings_model = IssueTrackerSettingsModel(repo=repo)
    return settings_model.get_settings(cache=True)
1685 1685
1686 1686
def process_patterns(text_string, repo_name, link_format='html',
                     active_entries=None):
    """
    Scan *text_string* with all active issue-tracker patterns of *repo_name*
    and replace each match with a link in the requested *link_format*.

    :param text_string: text to process
    :param repo_name: repo whose issue-tracker settings apply
    :param link_format: 'html', 'rst' or 'markdown'
    :param active_entries: optional pre-fetched settings dict; fetched from
        the repo settings when not given
    :return: tuple ``(newtext, issues_data)`` where issues_data is a list of
        ``{'id':..., 'url':...}`` dicts for every match found
    :raises ValueError: for an unsupported link_format
    """
    allowed_formats = ['html', 'rst', 'markdown']
    if link_format not in allowed_formats:
        raise ValueError('Link format can be only one of:{} got {}'.format(
            allowed_formats, link_format))

    active_entries = active_entries or get_active_pattern_entries(repo_name)
    issues_data = []
    newtext = text_string

    for uid, entry in active_entries.items():
        # lazy %-args: only formatted when DEBUG logging is actually enabled
        log.debug('found issue tracker entry with uid %s', uid)

        if not (entry['pat'] and entry['url']):
            log.debug('skipping due to missing data')
            continue

        log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s',
                  uid, entry['pat'], entry['url'], entry['pref'])

        try:
            pattern = re.compile(r'%s' % entry['pat'])
        except re.error:
            log.exception(
                'issue tracker pattern: `%s` failed to compile',
                entry['pat'])
            continue

        data_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            return_raw_data=True)

        # collect raw issue data from the *original* text so substitutions
        # made for earlier entries cannot hide later matches
        for match_obj in pattern.finditer(text_string):
            issues_data.append(data_func(match_obj))

        url_func = partial(
            _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
            link_format=link_format)

        newtext = pattern.sub(url_func, newtext)
        log.debug('processed prefix:uid `%s`', uid)

    return newtext, issues_data
1732 1732
1733 1733
def urlify_commit_message(commit_text, repository=None,
                          active_pattern_entries=None):
    """
    Parses given text message and makes proper links.
    issues are linked to given issue-server, and rest is a commit link

    :param commit_text:
    :param repository:
    """
    # renamed from `escaper(string)` so the local does not shadow the
    # module-level `string` module
    def _escape(text):
        return text.replace('<', '&lt;').replace('>', '&gt;')

    newtext = _escape(commit_text)

    # extract http/https links and make them real urls
    newtext = urlify_text(newtext, safe=False)

    # urlify commits - extract commit ids and make link out of them, if we have
    # the scope of repository present.
    if repository:
        newtext = urlify_commits(newtext, repository)

    # process issue tracker patterns
    newtext, issues = process_patterns(newtext, repository or '',
                                       active_entries=active_pattern_entries)

    return literal(newtext)
1761 1761
1762 1762
def render_binary(repo_name, file_obj):
    """
    Choose how to render a binary file

    Images get an inline ``<img>`` pointing at the raw-file view; any other
    binary yields ``None`` (no renderer).
    """
    filename = file_obj.name

    image_patterns = ['*.png', '*.jpg', '*.ico', '*.gif']
    for pattern in image_patterns:
        if not fnmatch.fnmatch(filename, pat=pattern):
            continue
        alt = filename
        src = route_path(
            'repo_file_raw', repo_name=repo_name,
            commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
        return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1777 1777
1778 1778
def renderer_from_filename(filename, exclude=None):
    """
    Pick a renderer name based on *filename* (text based files only).

    Jupyter notebooks take precedence; otherwise the markup-renderer
    registry decides. Returns ``None`` when nothing applies.
    """
    notebook_patterns = ['*.ipynb']
    if any(fnmatch.fnmatch(filename, pat=pattern)
           for pattern in notebook_patterns):
        return 'jupyter'

    markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
    return markup or None
1793 1793
1794 1794
def render(source, renderer='rst', mentions=False, relative_urls=None,
           repo_name=None):
    """
    Render *source* with the given renderer into a wrapped html block.

    :param source: raw text to render
    :param renderer: 'rst', 'markdown' or 'jupyter'; anything else returns
        ``None`` which means "show the raw file source"
    :param mentions: enable @mention linking in rst/markdown
    :param relative_urls: base to rewrite relative links against, if any
    :param repo_name: when given, issue-tracker patterns are linked too
    """

    def maybe_convert_relative_links(html_source):
        if relative_urls:
            return relative_links(html_source, relative_urls)
        return html_source

    if renderer == 'rst':
        if repo_name:
            # process patterns on comments if we pass in repo name
            # (raw issue data is not needed here, only the linked text)
            source, _ = process_patterns(
                source, repo_name, link_format='rst')

        return literal(
            '<div class="rst-block">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.rst(source, mentions=mentions)))
    elif renderer == 'markdown':
        if repo_name:
            # process patterns on comments if we pass in repo name
            source, _ = process_patterns(
                source, repo_name, link_format='markdown')

        return literal(
            '<div class="markdown-block">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.markdown(source, flavored=True,
                                        mentions=mentions)))
    elif renderer == 'jupyter':
        return literal(
            '<div class="ipynb">%s</div>' %
            maybe_convert_relative_links(
                MarkupRenderer.jupyter(source)))

    # None means just show the file-source
    return None
1832 1832
1833 1833
def commit_status(repo, commit_id):
    """Return the review status recorded for *commit_id* in *repo*."""
    return ChangesetStatusModel().get_status(repo, commit_id)
1836 1836
1837 1837
def commit_status_lbl(commit_status):
    """Translate a commit status code into its human readable label."""
    return dict(ChangesetStatus.STATUSES).get(commit_status)
1840 1840
1841 1841
def commit_time(repo_name, commit_id):
    """Return the date of commit *commit_id* inside repo *repo_name*."""
    repo = Repository.get_by_repo_name(repo_name)
    commit = repo.get_commit(commit_id=commit_id)
    return commit.date
1846 1846
1847 1847
def get_permission_name(key):
    """Return the human readable name for permission *key*, or None."""
    return dict(Permission.PERMS).get(key)
1850 1850
1851 1851
def journal_filter_help(request):
    """Return the translated example/help text for journal search filters."""
    _ = request.translate

    return _(
        'Example filter terms:\n' +
        '    repository:vcs\n' +
        '    username:marcin\n' +
        '    username:(NOT marcin)\n' +
        '    action:*push*\n' +
        '    ip:127.0.0.1\n' +
        '    date:20120101\n' +
        '    date:[20120101100000 TO 20120102]\n' +
        '\n' +
        'Generate wildcards using \'*\' character:\n' +
        '    "repository:vcs*" - search everything starting with \'vcs\'\n' +
        '    "repository:*vcs*" - search for repository containing \'vcs\'\n' +
        '\n' +
        'Optional AND / OR operators in queries\n' +
        '    "repository:vcs OR repository:test"\n' +
        '    "username:test AND repository:test*"\n'
    )
1873 1873
1874 1874
def search_filter_help(searcher, request):
    """Return translated example/help text for the full-text search box.

    ``terms`` is a placeholder for backend-specific examples; currently
    empty for all searchers.
    """
    _ = request.translate

    terms = ''
    return _(
        'Example filter terms for `{searcher}` search:\n' +
        '{terms}\n' +
        'Generate wildcards using \'*\' character:\n' +
        '    "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
        '    "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
        '\n' +
        'Optional AND / OR operators in queries\n' +
        '    "repo_name:vcs OR repo_name:test"\n' +
        '    "owner:test AND repo_name:test*"\n' +
        'More: {search_doc}'
    ).format(searcher=searcher.name,
             terms=terms, search_doc=searcher.query_lang_doc)
1892 1892
1893 1893
def not_mapped_error(repo_name):
    """Flash an error that *repo_name* exists on disk but not in the db."""
    from rhodecode.translation import _
    flash(_('%s repository is not mapped to db perhaps'
            ' it was created or renamed from the filesystem'
            ' please run the application again'
            ' in order to rescan repositories') % repo_name, category='error')
1900 1900
1901 1901
def ip_range(ip_addr):
    """Render the start/end addresses covered by *ip_addr* as 'A - B'."""
    from rhodecode.model.db import UserIpMap
    start_ip, end_ip = UserIpMap._get_ip_range(ip_addr)
    return '%s - %s' % (start_ip, end_ip)
1906 1906
1907 1907
def form(url, method='post', needs_csrf_token=True, **attrs):
    """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
    is_get = method.lower() == 'get'
    if not is_get and needs_csrf_token:
        raise Exception(
            'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
            'CSRF token. If the endpoint does not require such token you can ' +
            'explicitly set the parameter needs_csrf_token to false.')

    return wh_form(url, method=method, **attrs)
1917 1917
1918 1918
def secure_form(form_url, method="POST", multipart=False, **attrs):
    """Start a form tag that points the action to an url. This
    form tag will also include the hidden field containing
    the auth token.

    The url options should be given either as a string, or as a
    ``url()`` function. The method for the form defaults to POST.

    Options:

    ``multipart``
        If set to True, the enctype is set to "multipart/form-data".
    ``method``
        The method to use when submitting the form, usually either
        "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
        hidden input with name _method is added to simulate the verb
        over POST.
    ``request``
        Required keyword argument; the pyramid request whose session
        supplies the CSRF token. Raises ValueError when missing.

    """
    from webhelpers.pylonslib.secure_form import insecure_form

    if 'request' in attrs:
        session = attrs['request'].session
        del attrs['request']
    else:
        raise ValueError(
            'Calling this form requires request= to be passed as argument')

    # render the plain form first, then append the hidden CSRF token input
    form = insecure_form(form_url, method, multipart, **attrs)
    token = literal(
        '<input type="hidden" id="{}" name="{}" value="{}">'.format(
            csrf_token_key, csrf_token_key, get_csrf_token(session)))

    return literal("%s\n%s" % (form, token))
1953 1953
1954 1954
def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
    """
    Render a ``<select>`` plus the inline select2 initialisation script.

    :param name: form field name (also the default element id)
    :param selected: currently selected value
    :param options: option list passed straight to ``select()``
    :param enable_filter: when False, select2's search box is disabled by
        injecting ``minimumResultsForSearch: -1``
    """
    select_html = select(name, selected, options, **attrs)
    select2 = """
    <script>
        $(document).ready(function() {
              $('#%s').select2({
                  containerCssClass: 'drop-menu',
                  dropdownCssClass: 'drop-menu-dropdown',
                  dropdownAutoWidth: true%s
              });
        });
    </script>
    """
    filter_option = """,
            minimumResultsForSearch: -1
    """
    input_id = attrs.get('id') or name
    # note the inverted naming: this holds the *extra option* that disables
    # filtering, or the empty string when filtering is enabled
    filter_enabled = "" if enable_filter else filter_option
    select_script = literal(select2 % (input_id, filter_enabled))

    return literal(select_html+select_script)
1976 1976
1977 1977
def get_visual_attr(tmpl_context_var, attr_name):
    """
    A safe way to get a variable from visual variable of template context

    :param tmpl_context_var: instance of tmpl_context, usually present as `c`
    :param attr_name: name of the attribute we fetch from the c.visual
    :return: the attribute value, or ``None`` when either the visual
        object or the attribute is missing
    """
    visual = getattr(tmpl_context_var, 'visual', None)
    if visual:
        return getattr(visual, attr_name, None)
    return None
1990 1990
1991 1991
def get_last_path_part(file_node):
    """Return ``../<basename>`` for the node's path, or u'' when empty."""
    if not file_node.path:
        return u''

    basename = safe_unicode(file_node.path.split('/')[-1])
    return u'../' + basename
1998 1998
1999 1999
def route_url(*args, **kwargs):
    """
    Wrapper around pyramids `route_url` (fully qualified url) function.

    Uses the thread-local current request.
    """
    req = get_current_request()
    return req.route_url(*args, **kwargs)
2006 2006
2007 2007
def route_path(*args, **kwargs):
    """
    Wrapper around pyramids `route_path` function.

    Uses the thread-local current request.
    """
    req = get_current_request()
    return req.route_path(*args, **kwargs)
2014 2014
2015 2015
def route_path_or_none(*args, **kwargs):
    """Like :func:`route_path`, but returns None for an unknown route
    (pyramid raises KeyError for unresolvable route names)."""
    try:
        return route_path(*args, **kwargs)
    except KeyError:
        return None
2021 2021
2022 2022
def current_route_path(request, **kw):
    """Return the current route's path with the query string rebuilt from
    the existing GET params, overridden/extended by **kw."""
    new_args = request.GET.mixed()
    new_args.update(kw)
    return request.current_route_path(_query=new_args)
2027 2027
2028 2028
def api_call_example(method, args):
    """
    Generates an API call example via CURL

    :param method: api method name to call
    :param args: dict of arguments shown in the example payload
    :return: html-safe literal with the curl command and a hint where to
        obtain the auth token
    """
    # payload mirrors the JSON-RPC body the API endpoint expects
    args_json = json.dumps(OrderedDict([
        ('id', 1),
        ('auth_token', 'SECRET'),
        ('method', method),
        ('args', args)
    ]))
    return literal(
        "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
        "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
        "and needs to be of `api calls` role."
        .format(
            api_url=route_url('apiv2'),
            token_url=route_url('my_account_auth_tokens'),
            data=args_json))
2047 2047
2048 2048
def notification_description(notification, request):
    """
    Generate notification human readable description based on notification type

    Import is kept local, presumably to avoid a circular import at module
    load time -- TODO confirm.
    """
    from rhodecode.model.notification import NotificationModel
    return NotificationModel().make_description(
        notification, translate=request.translate)
2056 2056
2057 2057
def go_import_header(request, db_repo=None):
    """
    Creates a header for go-import functionality in Go Lang

    Returns the ``<meta name="go-import">`` tag only when a repo is given
    and the request carries the ``go-get`` query flag; otherwise None.
    """

    if not db_repo:
        return
    if 'go-get' not in request.GET:
        return

    clone_url = db_repo.clone_url()
    # strip the scheme: go-import wants "host/path" as the import prefix
    prefix = re.split(r'^https?:\/\/', clone_url)[-1]
    # we have a repo and go-get flag,
    return literal('<meta name="go-import" content="{} {} {}">'.format(
        prefix, db_repo.repo_type, clone_url))
2073
2074
def reviewer_as_json(*args, **kwargs):
    """Proxy to ``rhodecode.apps.repository.utils.reviewer_as_json``.

    Import is kept local, presumably to avoid a circular import at module
    load time -- TODO confirm.
    """
    from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
    return _reviewer_as_json(*args, **kwargs)
@@ -1,267 +1,393 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import itertools
23 23 import logging
24 from collections import defaultdict
24 import collections
25 25
26 26 from rhodecode.model import BaseModel
27 27 from rhodecode.model.db import (
28 28 ChangesetStatus, ChangesetComment, PullRequest, Session)
29 29 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
30 30 from rhodecode.lib.markup_renderer import (
31 31 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 class ChangesetStatusModel(BaseModel):
37 37
38 38 cls = ChangesetStatus
39 39
40 40 def __get_changeset_status(self, changeset_status):
41 41 return self._get_instance(ChangesetStatus, changeset_status)
42 42
43 43 def __get_pull_request(self, pull_request):
44 44 return self._get_instance(PullRequest, pull_request)
45 45
46 46 def _get_status_query(self, repo, revision, pull_request,
47 47 with_revisions=False):
48 48 repo = self._get_repo(repo)
49 49
50 50 q = ChangesetStatus.query()\
51 51 .filter(ChangesetStatus.repo == repo)
52 52 if not with_revisions:
53 53 q = q.filter(ChangesetStatus.version == 0)
54 54
55 55 if revision:
56 56 q = q.filter(ChangesetStatus.revision == revision)
57 57 elif pull_request:
58 58 pull_request = self.__get_pull_request(pull_request)
59 59 # TODO: johbo: Think about the impact of this join, there must
60 60 # be a reason why ChangesetStatus and ChanagesetComment is linked
61 61 # to the pull request. Might be that we want to do the same for
62 62 # the pull_request_version_id.
63 63 q = q.join(ChangesetComment).filter(
64 64 ChangesetStatus.pull_request == pull_request,
65 65 ChangesetComment.pull_request_version_id == None)
66 66 else:
67 67 raise Exception('Please specify revision or pull_request')
68 68 q = q.order_by(ChangesetStatus.version.asc())
69 69 return q
70 70
71 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
72 trim_votes=True):
73 """
74 Calculate status based on given group members, and voting rule
75
76
77 group1 - 4 members, 3 required for approval
78 user1 - approved
79 user2 - reject
80 user3 - approved
81 user4 - rejected
82
83 final_state: rejected, reasons not at least 3 votes
84
85
86 group1 - 4 members, 2 required for approval
87 user1 - approved
88 user2 - reject
89 user3 - approved
90 user4 - rejected
91
92 final_state: approved, reasons got at least 2 approvals
93
94 group1 - 4 members, ALL required for approval
95 user1 - approved
96 user2 - reject
97 user3 - approved
98 user4 - rejected
99
100 final_state: rejected, reasons not all approvals
101
102
103 group1 - 4 members, ALL required for approval
104 user1 - approved
105 user2 - approved
106 user3 - approved
107 user4 - approved
108
109 final_state: approved, reason all approvals received
110
111 group1 - 4 members, 5 required for approval
112 (approval should be shorted to number of actual members)
113
114 user1 - approved
115 user2 - approved
116 user3 - approved
117 user4 - approved
118
119 final_state: approved, reason all approvals received
120
121 """
122 group_vote_data = {}
123 got_rule = False
124 members = collections.OrderedDict()
125 for review_obj, user, reasons, mandatory, statuses \
126 in group_statuses_by_reviewers:
127
128 if not got_rule:
129 group_vote_data = review_obj.rule_user_group_data()
130 got_rule = bool(group_vote_data)
131
132 members[user.user_id] = statuses
133
134 if not group_vote_data:
135 return []
136
137 required_votes = group_vote_data['vote_rule']
138 if required_votes == -1:
139 # -1 means all required, so we replace it with how many people
140 # are in the members
141 required_votes = len(members)
142
143 if trim_votes and required_votes > len(members):
144 # we require more votes than we have members in the group
145 # in this case we trim the required votes to the number of members
146 required_votes = len(members)
147
148 approvals = sum([
149 1 for statuses in members.values()
150 if statuses and
151 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
152
153 calculated_votes = []
154 # we have all votes from users, now check if we have enough votes
155 # to fill other
156 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
157 if approvals >= required_votes:
158 fill_in = ChangesetStatus.STATUS_APPROVED
159
160 for member, statuses in members.items():
161 if statuses:
162 ver, latest = statuses[0]
163 if fill_in == ChangesetStatus.STATUS_APPROVED:
164 calculated_votes.append(fill_in)
165 else:
166 calculated_votes.append(latest.status)
167 else:
168 calculated_votes.append(fill_in)
169
170 return calculated_votes
171
71 172 def calculate_status(self, statuses_by_reviewers):
72 173 """
73 174 Given the approval statuses from reviewers, calculates final approval
74 175 status. There can only be 3 results, all approved, all rejected. If
75 176 there is no consensus the PR is under review.
76 177
77 178 :param statuses_by_reviewers:
78 179 """
79 votes = defaultdict(int)
180
181 def group_rule(element):
182 review_obj = element[0]
183 rule_data = review_obj.rule_user_group_data()
184 if rule_data and rule_data['id']:
185 return rule_data['id']
186
187 voting_groups = itertools.groupby(
188 sorted(statuses_by_reviewers, key=group_rule), group_rule)
189
190 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
191
80 192 reviewers_number = len(statuses_by_reviewers)
81 for user, reasons, mandatory, statuses in statuses_by_reviewers:
82 if statuses:
83 ver, latest = statuses[0]
84 votes[latest.status] += 1
193 votes = collections.defaultdict(int)
194 for group, group_statuses_by_reviewers in voting_by_groups:
195 if group:
196 # calculate how the "group" voted
197 for vote_status in self.calculate_group_vote(
198 group, group_statuses_by_reviewers):
199 votes[vote_status] += 1
85 200 else:
86 votes[ChangesetStatus.DEFAULT] += 1
201
202 for review_obj, user, reasons, mandatory, statuses \
203 in group_statuses_by_reviewers:
204 # individual vote
205 if statuses:
206 ver, latest = statuses[0]
207 votes[latest.status] += 1
87 208
88 # all approved
89 if votes.get(ChangesetStatus.STATUS_APPROVED) == reviewers_number:
209 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
210 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
211
212 # TODO(marcink): with group voting, how does rejected work,
213 # do we ever get rejected state ?
214
215 if approved_votes_count == reviewers_number:
90 216 return ChangesetStatus.STATUS_APPROVED
91 217
92 # all rejected
93 if votes.get(ChangesetStatus.STATUS_REJECTED) == reviewers_number:
218 if rejected_votes_count == reviewers_number:
94 219 return ChangesetStatus.STATUS_REJECTED
95 220
96 221 return ChangesetStatus.STATUS_UNDER_REVIEW
97 222
98 223 def get_statuses(self, repo, revision=None, pull_request=None,
99 224 with_revisions=False):
100 225 q = self._get_status_query(repo, revision, pull_request,
101 226 with_revisions)
102 227 return q.all()
103 228
104 229 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
105 230 """
106 231 Returns latest status of changeset for given revision or for given
107 232 pull request. Statuses are versioned inside a table itself and
108 233 version == 0 is always the current one
109 234
110 235 :param repo:
111 236 :param revision: 40char hash or None
112 237 :param pull_request: pull_request reference
113 238 :param as_str: return status as string not object
114 239 """
115 240 q = self._get_status_query(repo, revision, pull_request)
116 241
117 242 # need to use first here since there can be multiple statuses
118 243 # returned from pull_request
119 244 status = q.first()
120 245 if as_str:
121 246 status = status.status if status else status
122 247 st = status or ChangesetStatus.DEFAULT
123 248 return str(st)
124 249 return status
125 250
126 251 def _render_auto_status_message(
127 252 self, status, commit_id=None, pull_request=None):
128 253 """
129 254 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
130 255 so it's always looking the same disregarding on which default
131 256 renderer system is using.
132 257
133 258 :param status: status text to change into
134 259 :param commit_id: the commit_id we change the status for
135 260 :param pull_request: the pull request we change the status for
136 261 """
137 262
138 263 new_status = ChangesetStatus.get_status_lbl(status)
139 264
140 265 params = {
141 266 'new_status_label': new_status,
142 267 'pull_request': pull_request,
143 268 'commit_id': commit_id,
144 269 }
145 270 renderer = RstTemplateRenderer()
146 271 return renderer.render('auto_status_change.mako', **params)
147 272
148 273 def set_status(self, repo, status, user, comment=None, revision=None,
149 274 pull_request=None, dont_allow_on_closed_pull_request=False):
150 275 """
151 276 Creates new status for changeset or updates the old ones bumping their
152 277 version, leaving the current status at
153 278
154 279 :param repo:
155 280 :param revision:
156 281 :param status:
157 282 :param user:
158 283 :param comment:
159 284 :param dont_allow_on_closed_pull_request: don't allow a status change
160 285 if last status was for pull request and it's closed. We shouldn't
161 286 mess around this manually
162 287 """
163 288 repo = self._get_repo(repo)
164 289
165 290 q = ChangesetStatus.query()
166 291
167 292 if revision:
168 293 q = q.filter(ChangesetStatus.repo == repo)
169 294 q = q.filter(ChangesetStatus.revision == revision)
170 295 elif pull_request:
171 296 pull_request = self.__get_pull_request(pull_request)
172 297 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
173 298 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
174 299 cur_statuses = q.all()
175 300
176 301 # if statuses exists and last is associated with a closed pull request
177 302 # we need to check if we can allow this status change
178 303 if (dont_allow_on_closed_pull_request and cur_statuses
179 304 and getattr(cur_statuses[0].pull_request, 'status', '')
180 305 == PullRequest.STATUS_CLOSED):
181 306 raise StatusChangeOnClosedPullRequestError(
182 307 'Changing status on closed pull request is not allowed'
183 308 )
184 309
185 310 # update all current statuses with older version
186 311 if cur_statuses:
187 312 for st in cur_statuses:
188 313 st.version += 1
189 314 Session().add(st)
190 315
191 316 def _create_status(user, repo, status, comment, revision, pull_request):
192 317 new_status = ChangesetStatus()
193 318 new_status.author = self._get_user(user)
194 319 new_status.repo = self._get_repo(repo)
195 320 new_status.status = status
196 321 new_status.comment = comment
197 322 new_status.revision = revision
198 323 new_status.pull_request = pull_request
199 324 return new_status
200 325
201 326 if not comment:
202 327 from rhodecode.model.comment import CommentsModel
203 328 comment = CommentsModel().create(
204 329 text=self._render_auto_status_message(
205 330 status, commit_id=revision, pull_request=pull_request),
206 331 repo=repo,
207 332 user=user,
208 333 pull_request=pull_request,
209 334 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
210 335 )
211 336
212 337 if revision:
213 338 new_status = _create_status(
214 339 user=user, repo=repo, status=status, comment=comment,
215 340 revision=revision, pull_request=pull_request)
216 341 Session().add(new_status)
217 342 return new_status
218 343 elif pull_request:
219 344 # pull request can have more than one revision associated to it
220 345 # we need to create new version for each one
221 346 new_statuses = []
222 347 repo = pull_request.source_repo
223 348 for rev in pull_request.revisions:
224 349 new_status = _create_status(
225 350 user=user, repo=repo, status=status, comment=comment,
226 351 revision=rev, pull_request=pull_request)
227 352 new_statuses.append(new_status)
228 353 Session().add(new_status)
229 354 return new_statuses
230 355
231 356 def reviewers_statuses(self, pull_request):
232 357 _commit_statuses = self.get_statuses(
233 358 pull_request.source_repo,
234 359 pull_request=pull_request,
235 360 with_revisions=True)
236 361
237 commit_statuses = defaultdict(list)
362 commit_statuses = collections.defaultdict(list)
238 363 for st in _commit_statuses:
239 364 commit_statuses[st.author.username] += [st]
240 365
241 366 pull_request_reviewers = []
242 367
243 368 def version(commit_status):
244 369 return commit_status.version
245 370
246 for o in pull_request.reviewers:
247 if not o.user:
371 for obj in pull_request.reviewers:
372 if not obj.user:
248 373 continue
249 statuses = commit_statuses.get(o.user.username, None)
374 statuses = commit_statuses.get(obj.user.username, None)
250 375 if statuses:
251 statuses = [(x, list(y)[0])
252 for x, y in (itertools.groupby(
253 sorted(statuses, key=version),version))]
376 status_groups = itertools.groupby(
377 sorted(statuses, key=version), version)
378 statuses = [(x, list(y)[0]) for x, y in status_groups]
254 379
255 380 pull_request_reviewers.append(
256 (o.user, o.reasons, o.mandatory, statuses))
381 (obj, obj.user, obj.reasons, obj.mandatory, statuses))
382
257 383 return pull_request_reviewers
258 384
259 385 def calculated_review_status(self, pull_request, reviewers_statuses=None):
260 386 """
261 387 calculate pull request status based on reviewers, it should be a list
262 388 of two element lists.
263 389
264 390 :param reviewers_statuses:
265 391 """
266 392 reviewers = reviewers_statuses or self.reviewers_statuses(pull_request)
267 393 return self.calculate_status(reviewers)
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,614 +1,615 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 this is forms validation classes
23 23 http://formencode.org/module-formencode.validators.html
24 24 for list off all availible validators
25 25
26 26 we can create our own validators
27 27
28 28 The table below outlines the options which can be used in a schema in addition to the validators themselves
29 29 pre_validators [] These validators will be applied before the schema
30 30 chained_validators [] These validators will be applied after the schema
31 31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
32 32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
33 33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value.
34 34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
35 35
36 36
37 37 <name> = formencode.validators.<name of validator>
38 38 <name> must equal form name
39 39 list=[1,2,3,4,5]
40 40 for SELECT use formencode.All(OneOf(list), Int())
41 41
42 42 """
43 43
44 44 import deform
45 45 import logging
46 46 import formencode
47 47
48 48 from pkg_resources import resource_filename
49 49 from formencode import All, Pipe
50 50
51 51 from pyramid.threadlocal import get_current_request
52 52
53 53 from rhodecode import BACKENDS
54 54 from rhodecode.lib import helpers
55 55 from rhodecode.model import validators as v
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 deform_templates = resource_filename('deform', 'templates')
61 61 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
62 62 search_path = (rhodecode_templates, deform_templates)
63 63
64 64
65 65 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
66 66 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
67 67 def __call__(self, template_name, **kw):
68 68 kw['h'] = helpers
69 69 kw['request'] = get_current_request()
70 70 return self.load(template_name)(**kw)
71 71
72 72
73 73 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
74 74 deform.Form.set_default_renderer(form_renderer)
75 75
76 76
77 77 def LoginForm(localizer):
78 78 _ = localizer
79 79
80 80 class _LoginForm(formencode.Schema):
81 81 allow_extra_fields = True
82 82 filter_extra_fields = True
83 83 username = v.UnicodeString(
84 84 strip=True,
85 85 min=1,
86 86 not_empty=True,
87 87 messages={
88 88 'empty': _(u'Please enter a login'),
89 89 'tooShort': _(u'Enter a value %(min)i characters long or more')
90 90 }
91 91 )
92 92
93 93 password = v.UnicodeString(
94 94 strip=False,
95 95 min=3,
96 96 max=72,
97 97 not_empty=True,
98 98 messages={
99 99 'empty': _(u'Please enter a password'),
100 100 'tooShort': _(u'Enter %(min)i characters or more')}
101 101 )
102 102
103 103 remember = v.StringBoolean(if_missing=False)
104 104
105 105 chained_validators = [v.ValidAuth(localizer)]
106 106 return _LoginForm
107 107
108 108
109 109 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
110 110 old_data = old_data or {}
111 111 available_languages = available_languages or []
112 112 _ = localizer
113 113
114 114 class _UserForm(formencode.Schema):
115 115 allow_extra_fields = True
116 116 filter_extra_fields = True
117 117 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
118 118 v.ValidUsername(localizer, edit, old_data))
119 119 if edit:
120 120 new_password = All(
121 121 v.ValidPassword(localizer),
122 122 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
123 123 )
124 124 password_confirmation = All(
125 125 v.ValidPassword(localizer),
126 126 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
127 127 )
128 128 admin = v.StringBoolean(if_missing=False)
129 129 else:
130 130 password = All(
131 131 v.ValidPassword(localizer),
132 132 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
133 133 )
134 134 password_confirmation = All(
135 135 v.ValidPassword(localizer),
136 136 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
137 137 )
138 138
139 139 password_change = v.StringBoolean(if_missing=False)
140 140 create_repo_group = v.StringBoolean(if_missing=False)
141 141
142 142 active = v.StringBoolean(if_missing=False)
143 143 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
144 144 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
145 145 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
146 146 extern_name = v.UnicodeString(strip=True)
147 147 extern_type = v.UnicodeString(strip=True)
148 148 language = v.OneOf(available_languages, hideList=False,
149 149 testValueList=True, if_missing=None)
150 150 chained_validators = [v.ValidPasswordsMatch(localizer)]
151 151 return _UserForm
152 152
153 153
154 154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
155 155 old_data = old_data or {}
156 156 _ = localizer
157 157
158 158 class _UserGroupForm(formencode.Schema):
159 159 allow_extra_fields = True
160 160 filter_extra_fields = True
161 161
162 162 users_group_name = All(
163 163 v.UnicodeString(strip=True, min=1, not_empty=True),
164 164 v.ValidUserGroup(localizer, edit, old_data)
165 165 )
166 166 user_group_description = v.UnicodeString(strip=True, min=1,
167 167 not_empty=False)
168 168
169 169 users_group_active = v.StringBoolean(if_missing=False)
170 170
171 171 if edit:
172 172 # this is user group owner
173 173 user = All(
174 174 v.UnicodeString(not_empty=True),
175 175 v.ValidRepoUser(localizer, allow_disabled))
176 176 return _UserGroupForm
177 177
178 178
179 179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
180 180 can_create_in_root=False, allow_disabled=False):
181 181 _ = localizer
182 182 old_data = old_data or {}
183 183 available_groups = available_groups or []
184 184
185 185 class _RepoGroupForm(formencode.Schema):
186 186 allow_extra_fields = True
187 187 filter_extra_fields = False
188 188
189 189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
190 190 v.SlugifyName(localizer),)
191 191 group_description = v.UnicodeString(strip=True, min=1,
192 192 not_empty=False)
193 193 group_copy_permissions = v.StringBoolean(if_missing=False)
194 194
195 195 group_parent_id = v.OneOf(available_groups, hideList=False,
196 196 testValueList=True, not_empty=True)
197 197 enable_locking = v.StringBoolean(if_missing=False)
198 198 chained_validators = [
199 199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
200 200
201 201 if edit:
202 202 # this is repo group owner
203 203 user = All(
204 204 v.UnicodeString(not_empty=True),
205 205 v.ValidRepoUser(localizer, allow_disabled))
206 206 return _RepoGroupForm
207 207
208 208
209 209 def RegisterForm(localizer, edit=False, old_data=None):
210 210 _ = localizer
211 211 old_data = old_data or {}
212 212
213 213 class _RegisterForm(formencode.Schema):
214 214 allow_extra_fields = True
215 215 filter_extra_fields = True
216 216 username = All(
217 217 v.ValidUsername(localizer, edit, old_data),
218 218 v.UnicodeString(strip=True, min=1, not_empty=True)
219 219 )
220 220 password = All(
221 221 v.ValidPassword(localizer),
222 222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
223 223 )
224 224 password_confirmation = All(
225 225 v.ValidPassword(localizer),
226 226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
227 227 )
228 228 active = v.StringBoolean(if_missing=False)
229 229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
232 232
233 233 chained_validators = [v.ValidPasswordsMatch(localizer)]
234 234 return _RegisterForm
235 235
236 236
237 237 def PasswordResetForm(localizer):
238 238 _ = localizer
239 239
240 240 class _PasswordResetForm(formencode.Schema):
241 241 allow_extra_fields = True
242 242 filter_extra_fields = True
243 243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
244 244 return _PasswordResetForm
245 245
246 246
247 247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None,
248 248 landing_revs=None, allow_disabled=False):
249 249 _ = localizer
250 250 old_data = old_data or {}
251 251 repo_groups = repo_groups or []
252 252 landing_revs = landing_revs or []
253 253 supported_backends = BACKENDS.keys()
254 254
255 255 class _RepoForm(formencode.Schema):
256 256 allow_extra_fields = True
257 257 filter_extra_fields = False
258 258 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
259 259 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
260 260 repo_group = All(v.CanWriteGroup(localizer, old_data),
261 261 v.OneOf(repo_groups, hideList=True))
262 262 repo_type = v.OneOf(supported_backends, required=False,
263 263 if_missing=old_data.get('repo_type'))
264 264 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
265 265 repo_private = v.StringBoolean(if_missing=False)
266 266 repo_landing_rev = v.OneOf(landing_revs, hideList=True)
267 267 repo_copy_permissions = v.StringBoolean(if_missing=False)
268 268 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
269 269
270 270 repo_enable_statistics = v.StringBoolean(if_missing=False)
271 271 repo_enable_downloads = v.StringBoolean(if_missing=False)
272 272 repo_enable_locking = v.StringBoolean(if_missing=False)
273 273
274 274 if edit:
275 275 # this is repo owner
276 276 user = All(
277 277 v.UnicodeString(not_empty=True),
278 278 v.ValidRepoUser(localizer, allow_disabled))
279 279 clone_uri_change = v.UnicodeString(
280 280 not_empty=False, if_missing=v.Missing)
281 281
282 282 chained_validators = [v.ValidCloneUri(localizer),
283 283 v.ValidRepoName(localizer, edit, old_data)]
284 284 return _RepoForm
285 285
286 286
287 287 def RepoPermsForm(localizer):
288 288 _ = localizer
289 289
290 290 class _RepoPermsForm(formencode.Schema):
291 291 allow_extra_fields = True
292 292 filter_extra_fields = False
293 293 chained_validators = [v.ValidPerms(localizer, type_='repo')]
294 294 return _RepoPermsForm
295 295
296 296
297 297 def RepoGroupPermsForm(localizer, valid_recursive_choices):
298 298 _ = localizer
299 299
300 300 class _RepoGroupPermsForm(formencode.Schema):
301 301 allow_extra_fields = True
302 302 filter_extra_fields = False
303 303 recursive = v.OneOf(valid_recursive_choices)
304 304 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
305 305 return _RepoGroupPermsForm
306 306
307 307
308 308 def UserGroupPermsForm(localizer):
309 309 _ = localizer
310 310
311 311 class _UserPermsForm(formencode.Schema):
312 312 allow_extra_fields = True
313 313 filter_extra_fields = False
314 314 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
315 315 return _UserPermsForm
316 316
317 317
318 318 def RepoFieldForm(localizer):
319 319 _ = localizer
320 320
321 321 class _RepoFieldForm(formencode.Schema):
322 322 filter_extra_fields = True
323 323 allow_extra_fields = True
324 324
325 325 new_field_key = All(v.FieldKey(localizer),
326 326 v.UnicodeString(strip=True, min=3, not_empty=True))
327 327 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
328 328 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
329 329 if_missing='str')
330 330 new_field_label = v.UnicodeString(not_empty=False)
331 331 new_field_desc = v.UnicodeString(not_empty=False)
332 332 return _RepoFieldForm
333 333
334 334
335 335 def RepoForkForm(localizer, edit=False, old_data=None,
336 336 supported_backends=BACKENDS.keys(), repo_groups=None,
337 337 landing_revs=None):
338 338 _ = localizer
339 339 old_data = old_data or {}
340 340 repo_groups = repo_groups or []
341 341 landing_revs = landing_revs or []
342 342
343 343 class _RepoForkForm(formencode.Schema):
344 344 allow_extra_fields = True
345 345 filter_extra_fields = False
346 346 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
347 347 v.SlugifyName(localizer))
348 348 repo_group = All(v.CanWriteGroup(localizer, ),
349 349 v.OneOf(repo_groups, hideList=True))
350 350 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
351 351 description = v.UnicodeString(strip=True, min=1, not_empty=True)
352 352 private = v.StringBoolean(if_missing=False)
353 353 copy_permissions = v.StringBoolean(if_missing=False)
354 354 fork_parent_id = v.UnicodeString()
355 355 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
356 356 landing_rev = v.OneOf(landing_revs, hideList=True)
357 357 return _RepoForkForm
358 358
359 359
360 360 def ApplicationSettingsForm(localizer):
361 361 _ = localizer
362 362
363 363 class _ApplicationSettingsForm(formencode.Schema):
364 364 allow_extra_fields = True
365 365 filter_extra_fields = False
366 366 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
367 367 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
368 368 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
369 369 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
370 370 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
371 371 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
372 372 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
373 373 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
374 374 return _ApplicationSettingsForm
375 375
376 376
377 377 def ApplicationVisualisationForm(localizer):
378 378 _ = localizer
379 379
380 380 class _ApplicationVisualisationForm(formencode.Schema):
381 381 allow_extra_fields = True
382 382 filter_extra_fields = False
383 383 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
384 384 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
385 385 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
386 386
387 387 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
388 388 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
389 389 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
390 390 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
391 391 rhodecode_show_version = v.StringBoolean(if_missing=False)
392 392 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
393 393 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
394 394 rhodecode_gravatar_url = v.UnicodeString(min=3)
395 395 rhodecode_clone_uri_tmpl = v.UnicodeString(min=3)
396 396 rhodecode_support_url = v.UnicodeString()
397 397 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
398 398 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
399 399 return _ApplicationVisualisationForm
400 400
401 401
402 402 class _BaseVcsSettingsForm(formencode.Schema):
403 403
404 404 allow_extra_fields = True
405 405 filter_extra_fields = False
406 406 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
407 407 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
408 408 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
409 409
410 410 # PR/Code-review
411 411 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
412 412 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
413 413
414 414 # hg
415 415 extensions_largefiles = v.StringBoolean(if_missing=False)
416 416 extensions_evolve = v.StringBoolean(if_missing=False)
417 417 phases_publish = v.StringBoolean(if_missing=False)
418 418
419 419 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
420 420 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
421 421
422 422 # git
423 423 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
424 424 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
425 425 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
426 426
427 427 # svn
428 428 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
429 429 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
430 430
431 431
432 432 def ApplicationUiSettingsForm(localizer):
433 433 _ = localizer
434 434
435 435 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
436 436 web_push_ssl = v.StringBoolean(if_missing=False)
437 437 paths_root_path = All(
438 438 v.ValidPath(localizer),
439 439 v.UnicodeString(strip=True, min=1, not_empty=True)
440 440 )
441 441 largefiles_usercache = All(
442 442 v.ValidPath(localizer),
443 443 v.UnicodeString(strip=True, min=2, not_empty=True))
444 444 vcs_git_lfs_store_location = All(
445 445 v.ValidPath(localizer),
446 446 v.UnicodeString(strip=True, min=2, not_empty=True))
447 447 extensions_hgsubversion = v.StringBoolean(if_missing=False)
448 448 extensions_hggit = v.StringBoolean(if_missing=False)
449 449 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
450 450 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
451 451 return _ApplicationUiSettingsForm
452 452
453 453
454 454 def RepoVcsSettingsForm(localizer, repo_name):
455 455 _ = localizer
456 456
457 457 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
458 458 inherit_global_settings = v.StringBoolean(if_missing=False)
459 459 new_svn_branch = v.ValidSvnPattern(localizer,
460 460 section='vcs_svn_branch', repo_name=repo_name)
461 461 new_svn_tag = v.ValidSvnPattern(localizer,
462 462 section='vcs_svn_tag', repo_name=repo_name)
463 463 return _RepoVcsSettingsForm
464 464
465 465
466 466 def LabsSettingsForm(localizer):
467 467 _ = localizer
468 468
469 469 class _LabSettingsForm(formencode.Schema):
470 470 allow_extra_fields = True
471 471 filter_extra_fields = False
472 472 return _LabSettingsForm
473 473
474 474
475 475 def ApplicationPermissionsForm(
476 476 localizer, register_choices, password_reset_choices,
477 477 extern_activate_choices):
478 478 _ = localizer
479 479
480 480 class _DefaultPermissionsForm(formencode.Schema):
481 481 allow_extra_fields = True
482 482 filter_extra_fields = True
483 483
484 484 anonymous = v.StringBoolean(if_missing=False)
485 485 default_register = v.OneOf(register_choices)
486 486 default_register_message = v.UnicodeString()
487 487 default_password_reset = v.OneOf(password_reset_choices)
488 488 default_extern_activate = v.OneOf(extern_activate_choices)
489 489 return _DefaultPermissionsForm
490 490
491 491
492 492 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
493 493 user_group_perms_choices):
494 494 _ = localizer
495 495
496 496 class _ObjectPermissionsForm(formencode.Schema):
497 497 allow_extra_fields = True
498 498 filter_extra_fields = True
499 499 overwrite_default_repo = v.StringBoolean(if_missing=False)
500 500 overwrite_default_group = v.StringBoolean(if_missing=False)
501 501 overwrite_default_user_group = v.StringBoolean(if_missing=False)
502 502 default_repo_perm = v.OneOf(repo_perms_choices)
503 503 default_group_perm = v.OneOf(group_perms_choices)
504 504 default_user_group_perm = v.OneOf(user_group_perms_choices)
505 505 return _ObjectPermissionsForm
506 506
507 507
508 508 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
509 509 repo_group_create_choices, user_group_create_choices,
510 510 fork_choices, inherit_default_permissions_choices):
511 511 _ = localizer
512 512
513 513 class _DefaultPermissionsForm(formencode.Schema):
514 514 allow_extra_fields = True
515 515 filter_extra_fields = True
516 516
517 517 anonymous = v.StringBoolean(if_missing=False)
518 518
519 519 default_repo_create = v.OneOf(create_choices)
520 520 default_repo_create_on_write = v.OneOf(create_on_write_choices)
521 521 default_user_group_create = v.OneOf(user_group_create_choices)
522 522 default_repo_group_create = v.OneOf(repo_group_create_choices)
523 523 default_fork_create = v.OneOf(fork_choices)
524 524 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
525 525 return _DefaultPermissionsForm
526 526
527 527
528 528 def UserIndividualPermissionsForm(localizer):
529 529 _ = localizer
530 530
531 531 class _DefaultPermissionsForm(formencode.Schema):
532 532 allow_extra_fields = True
533 533 filter_extra_fields = True
534 534
535 535 inherit_default_permissions = v.StringBoolean(if_missing=False)
536 536 return _DefaultPermissionsForm
537 537
538 538
539 539 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
540 540 _ = localizer
541 541 old_data = old_data or {}
542 542
543 543 class _DefaultsForm(formencode.Schema):
544 544 allow_extra_fields = True
545 545 filter_extra_fields = True
546 546 default_repo_type = v.OneOf(supported_backends)
547 547 default_repo_private = v.StringBoolean(if_missing=False)
548 548 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
549 549 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
550 550 default_repo_enable_locking = v.StringBoolean(if_missing=False)
551 551 return _DefaultsForm
552 552
553 553
554 554 def AuthSettingsForm(localizer):
555 555 _ = localizer
556 556
557 557 class _AuthSettingsForm(formencode.Schema):
558 558 allow_extra_fields = True
559 559 filter_extra_fields = True
560 560 auth_plugins = All(v.ValidAuthPlugins(localizer),
561 561 v.UniqueListFromString(localizer)(not_empty=True))
562 562 return _AuthSettingsForm
563 563
564 564
565 565 def UserExtraEmailForm(localizer):
566 566 _ = localizer
567 567
568 568 class _UserExtraEmailForm(formencode.Schema):
569 569 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
570 570 return _UserExtraEmailForm
571 571
572 572
573 573 def UserExtraIpForm(localizer):
574 574 _ = localizer
575 575
576 576 class _UserExtraIpForm(formencode.Schema):
577 577 ip = v.ValidIp(localizer)(not_empty=True)
578 578 return _UserExtraIpForm
579 579
580 580
581 581 def PullRequestForm(localizer, repo_id):
582 582 _ = localizer
583 583
584 584 class ReviewerForm(formencode.Schema):
585 585 user_id = v.Int(not_empty=True)
586 586 reasons = All()
587 rules = All(v.UniqueList(localizer, convert=int)())
587 588 mandatory = v.StringBoolean()
588 589
589 590 class _PullRequestForm(formencode.Schema):
590 591 allow_extra_fields = True
591 592 filter_extra_fields = True
592 593
593 594 common_ancestor = v.UnicodeString(strip=True, required=True)
594 595 source_repo = v.UnicodeString(strip=True, required=True)
595 596 source_ref = v.UnicodeString(strip=True, required=True)
596 597 target_repo = v.UnicodeString(strip=True, required=True)
597 598 target_ref = v.UnicodeString(strip=True, required=True)
598 599 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
599 600 v.UniqueList(localizer)(not_empty=True))
600 601 review_members = formencode.ForEach(ReviewerForm())
601 602 pullrequest_title = v.UnicodeString(strip=True, required=True, min=3, max=255)
602 603 pullrequest_desc = v.UnicodeString(strip=True, required=False)
603 604
604 605 return _PullRequestForm
605 606
606 607
607 608 def IssueTrackerPatternsForm(localizer):
608 609 _ = localizer
609 610
610 611 class _IssueTrackerPatternsForm(formencode.Schema):
611 612 allow_extra_fields = True
612 613 filter_extra_fields = False
613 614 chained_validators = [v.ValidPattern(localizer)]
614 615 return _IssueTrackerPatternsForm
@@ -1,1654 +1,1681 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid.threadlocal import get_current_request
34 34
35 35 from rhodecode import events
36 36 from rhodecode.translation import lazy_ugettext#, _
37 37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 38 from rhodecode.lib import audit_logger
39 39 from rhodecode.lib.compat import OrderedDict
40 40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 41 from rhodecode.lib.markup_renderer import (
42 42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 44 from rhodecode.lib.vcs.backends.base import (
45 45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 47 from rhodecode.lib.vcs.exceptions import (
48 48 CommitDoesNotExistError, EmptyRepositoryError)
49 49 from rhodecode.model import BaseModel
50 50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 51 from rhodecode.model.comment import CommentsModel
52 52 from rhodecode.model.db import (
53 53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 PullRequestVersion, ChangesetComment, Repository)
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 55 from rhodecode.model.meta import Session
56 56 from rhodecode.model.notification import NotificationModel, \
57 57 EmailNotificationModel
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.settings import VcsSettingsModel
60 60
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64
65 65 # Data structure to hold the response data when updating commits during a pull
66 66 # request update.
67 67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 68 'executed', 'reason', 'new', 'old', 'changes',
69 69 'source_changed', 'target_changed'])
70 70
71 71
72 72 class PullRequestModel(BaseModel):
73 73
74 74 cls = PullRequest
75 75
76 76 DIFF_CONTEXT = 3
77 77
78 78 MERGE_STATUS_MESSAGES = {
79 79 MergeFailureReason.NONE: lazy_ugettext(
80 80 'This pull request can be automatically merged.'),
81 81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 82 'This pull request cannot be merged because of an unhandled'
83 83 ' exception.'),
84 84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 85 'This pull request cannot be merged because of merge conflicts.'),
86 86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 87 'This pull request could not be merged because push to target'
88 88 ' failed.'),
89 89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 90 'This pull request cannot be merged because the target is not a'
91 91 ' head.'),
92 92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 93 'This pull request cannot be merged because the source contains'
94 94 ' more branches than the target.'),
95 95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 96 'This pull request cannot be merged because the target has'
97 97 ' multiple heads.'),
98 98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 99 'This pull request cannot be merged because the target repository'
100 100 ' is locked.'),
101 101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 102 'This pull request cannot be merged because the target or the '
103 103 'source reference is missing.'),
104 104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 105 'This pull request cannot be merged because the target '
106 106 'reference is missing.'),
107 107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 108 'This pull request cannot be merged because the source '
109 109 'reference is missing.'),
110 110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 111 'This pull request cannot be merged because of conflicts related '
112 112 'to sub repositories.'),
113 113 }
114 114
115 115 UPDATE_STATUS_MESSAGES = {
116 116 UpdateFailureReason.NONE: lazy_ugettext(
117 117 'Pull request update successful.'),
118 118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 119 'Pull request update failed because of an unknown error.'),
120 120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 121 'No update needed because the source and target have not changed.'),
122 122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 123 'Pull request cannot be updated because the reference type is '
124 124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 126 'This pull request cannot be updated because the target '
127 127 'reference is missing.'),
128 128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 129 'This pull request cannot be updated because the source '
130 130 'reference is missing.'),
131 131 }
132 132
133 133 def __get_pull_request(self, pull_request):
134 134 return self._get_instance((
135 135 PullRequest, PullRequestVersion), pull_request)
136 136
137 137 def _check_perms(self, perms, pull_request, user, api=False):
138 138 if not api:
139 139 return h.HasRepoPermissionAny(*perms)(
140 140 user=user, repo_name=pull_request.target_repo.repo_name)
141 141 else:
142 142 return h.HasRepoPermissionAnyApi(*perms)(
143 143 user=user, repo_name=pull_request.target_repo.repo_name)
144 144
145 145 def check_user_read(self, pull_request, user, api=False):
146 146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 147 return self._check_perms(_perms, pull_request, user, api)
148 148
149 149 def check_user_merge(self, pull_request, user, api=False):
150 150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 151 return self._check_perms(_perms, pull_request, user, api)
152 152
153 153 def check_user_update(self, pull_request, user, api=False):
154 154 owner = user.user_id == pull_request.user_id
155 155 return self.check_user_merge(pull_request, user, api) or owner
156 156
157 157 def check_user_delete(self, pull_request, user):
158 158 owner = user.user_id == pull_request.user_id
159 159 _perms = ('repository.admin',)
160 160 return self._check_perms(_perms, pull_request, user) or owner
161 161
162 162 def check_user_change_status(self, pull_request, user, api=False):
163 163 reviewer = user.user_id in [x.user_id for x in
164 164 pull_request.reviewers]
165 165 return self.check_user_update(pull_request, user, api) or reviewer
166 166
167 167 def check_user_comment(self, pull_request, user):
168 168 owner = user.user_id == pull_request.user_id
169 169 return self.check_user_read(pull_request, user) or owner
170 170
171 171 def get(self, pull_request):
172 172 return self.__get_pull_request(pull_request)
173 173
174 174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 175 opened_by=None, order_by=None,
176 176 order_dir='desc'):
177 177 repo = None
178 178 if repo_name:
179 179 repo = self._get_repo(repo_name)
180 180
181 181 q = PullRequest.query()
182 182
183 183 # source or target
184 184 if repo and source:
185 185 q = q.filter(PullRequest.source_repo == repo)
186 186 elif repo:
187 187 q = q.filter(PullRequest.target_repo == repo)
188 188
189 189 # closed,opened
190 190 if statuses:
191 191 q = q.filter(PullRequest.status.in_(statuses))
192 192
193 193 # opened by filter
194 194 if opened_by:
195 195 q = q.filter(PullRequest.user_id.in_(opened_by))
196 196
197 197 if order_by:
198 198 order_map = {
199 199 'name_raw': PullRequest.pull_request_id,
200 200 'title': PullRequest.title,
201 201 'updated_on_raw': PullRequest.updated_on,
202 202 'target_repo': PullRequest.target_repo_id
203 203 }
204 204 if order_dir == 'asc':
205 205 q = q.order_by(order_map[order_by].asc())
206 206 else:
207 207 q = q.order_by(order_map[order_by].desc())
208 208
209 209 return q
210 210
211 211 def count_all(self, repo_name, source=False, statuses=None,
212 212 opened_by=None):
213 213 """
214 214 Count the number of pull requests for a specific repository.
215 215
216 216 :param repo_name: target or source repo
217 217 :param source: boolean flag to specify if repo_name refers to source
218 218 :param statuses: list of pull request statuses
219 219 :param opened_by: author user of the pull request
220 220 :returns: int number of pull requests
221 221 """
222 222 q = self._prepare_get_all_query(
223 223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224 224
225 225 return q.count()
226 226
227 227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 228 offset=0, length=None, order_by=None, order_dir='desc'):
229 229 """
230 230 Get all pull requests for a specific repository.
231 231
232 232 :param repo_name: target or source repo
233 233 :param source: boolean flag to specify if repo_name refers to source
234 234 :param statuses: list of pull request statuses
235 235 :param opened_by: author user of the pull request
236 236 :param offset: pagination offset
237 237 :param length: length of returned list
238 238 :param order_by: order of the returned list
239 239 :param order_dir: 'asc' or 'desc' ordering direction
240 240 :returns: list of pull requests
241 241 """
242 242 q = self._prepare_get_all_query(
243 243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 244 order_by=order_by, order_dir=order_dir)
245 245
246 246 if length:
247 247 pull_requests = q.limit(length).offset(offset).all()
248 248 else:
249 249 pull_requests = q.all()
250 250
251 251 return pull_requests
252 252
253 253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 254 opened_by=None):
255 255 """
256 256 Count the number of pull requests for a specific repository that are
257 257 awaiting review.
258 258
259 259 :param repo_name: target or source repo
260 260 :param source: boolean flag to specify if repo_name refers to source
261 261 :param statuses: list of pull request statuses
262 262 :param opened_by: author user of the pull request
263 263 :returns: int number of pull requests
264 264 """
265 265 pull_requests = self.get_awaiting_review(
266 266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267 267
268 268 return len(pull_requests)
269 269
270 270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 271 opened_by=None, offset=0, length=None,
272 272 order_by=None, order_dir='desc'):
273 273 """
274 274 Get all pull requests for a specific repository that are awaiting
275 275 review.
276 276
277 277 :param repo_name: target or source repo
278 278 :param source: boolean flag to specify if repo_name refers to source
279 279 :param statuses: list of pull request statuses
280 280 :param opened_by: author user of the pull request
281 281 :param offset: pagination offset
282 282 :param length: length of returned list
283 283 :param order_by: order of the returned list
284 284 :param order_dir: 'asc' or 'desc' ordering direction
285 285 :returns: list of pull requests
286 286 """
287 287 pull_requests = self.get_all(
288 288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 289 order_by=order_by, order_dir=order_dir)
290 290
291 291 _filtered_pull_requests = []
292 292 for pr in pull_requests:
293 293 status = pr.calculated_review_status()
294 294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 296 _filtered_pull_requests.append(pr)
297 297 if length:
298 298 return _filtered_pull_requests[offset:offset+length]
299 299 else:
300 300 return _filtered_pull_requests
301 301
302 302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 303 opened_by=None, user_id=None):
304 304 """
305 305 Count the number of pull requests for a specific repository that are
306 306 awaiting review from a specific user.
307 307
308 308 :param repo_name: target or source repo
309 309 :param source: boolean flag to specify if repo_name refers to source
310 310 :param statuses: list of pull request statuses
311 311 :param opened_by: author user of the pull request
312 312 :param user_id: reviewer user of the pull request
313 313 :returns: int number of pull requests
314 314 """
315 315 pull_requests = self.get_awaiting_my_review(
316 316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 317 user_id=user_id)
318 318
319 319 return len(pull_requests)
320 320
321 321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 322 opened_by=None, user_id=None, offset=0,
323 323 length=None, order_by=None, order_dir='desc'):
324 324 """
325 325 Get all pull requests for a specific repository that are awaiting
326 326 review from a specific user.
327 327
328 328 :param repo_name: target or source repo
329 329 :param source: boolean flag to specify if repo_name refers to source
330 330 :param statuses: list of pull request statuses
331 331 :param opened_by: author user of the pull request
332 332 :param user_id: reviewer user of the pull request
333 333 :param offset: pagination offset
334 334 :param length: length of returned list
335 335 :param order_by: order of the returned list
336 336 :param order_dir: 'asc' or 'desc' ordering direction
337 337 :returns: list of pull requests
338 338 """
339 339 pull_requests = self.get_all(
340 340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 341 order_by=order_by, order_dir=order_dir)
342 342
343 343 _my = PullRequestModel().get_not_reviewed(user_id)
344 344 my_participation = []
345 345 for pr in pull_requests:
346 346 if pr in _my:
347 347 my_participation.append(pr)
348 348 _filtered_pull_requests = my_participation
349 349 if length:
350 350 return _filtered_pull_requests[offset:offset+length]
351 351 else:
352 352 return _filtered_pull_requests
353 353
354 354 def get_not_reviewed(self, user_id):
355 355 return [
356 356 x.pull_request for x in PullRequestReviewers.query().filter(
357 357 PullRequestReviewers.user_id == user_id).all()
358 358 ]
359 359
360 360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 361 order_by=None, order_dir='desc'):
362 362 q = PullRequest.query()
363 363 if user_id:
364 364 reviewers_subquery = Session().query(
365 365 PullRequestReviewers.pull_request_id).filter(
366 366 PullRequestReviewers.user_id == user_id).subquery()
367 367 user_filter = or_(
368 368 PullRequest.user_id == user_id,
369 369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 370 )
371 371 q = PullRequest.query().filter(user_filter)
372 372
373 373 # closed,opened
374 374 if statuses:
375 375 q = q.filter(PullRequest.status.in_(statuses))
376 376
377 377 if order_by:
378 378 order_map = {
379 379 'name_raw': PullRequest.pull_request_id,
380 380 'title': PullRequest.title,
381 381 'updated_on_raw': PullRequest.updated_on,
382 382 'target_repo': PullRequest.target_repo_id
383 383 }
384 384 if order_dir == 'asc':
385 385 q = q.order_by(order_map[order_by].asc())
386 386 else:
387 387 q = q.order_by(order_map[order_by].desc())
388 388
389 389 return q
390 390
391 391 def count_im_participating_in(self, user_id=None, statuses=None):
392 392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 393 return q.count()
394 394
395 395 def get_im_participating_in(
396 396 self, user_id=None, statuses=None, offset=0,
397 397 length=None, order_by=None, order_dir='desc'):
398 398 """
399 399 Get all Pull requests that i'm participating in, or i have opened
400 400 """
401 401
402 402 q = self._prepare_participating_query(
403 403 user_id, statuses=statuses, order_by=order_by,
404 404 order_dir=order_dir)
405 405
406 406 if length:
407 407 pull_requests = q.limit(length).offset(offset).all()
408 408 else:
409 409 pull_requests = q.all()
410 410
411 411 return pull_requests
412 412
413 413 def get_versions(self, pull_request):
414 414 """
415 415 returns version of pull request sorted by ID descending
416 416 """
417 417 return PullRequestVersion.query()\
418 418 .filter(PullRequestVersion.pull_request == pull_request)\
419 419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 420 .all()
421 421
422 422 def get_pr_version(self, pull_request_id, version=None):
423 423 at_version = None
424 424
425 425 if version and version == 'latest':
426 426 pull_request_ver = PullRequest.get(pull_request_id)
427 427 pull_request_obj = pull_request_ver
428 428 _org_pull_request_obj = pull_request_obj
429 429 at_version = 'latest'
430 430 elif version:
431 431 pull_request_ver = PullRequestVersion.get_or_404(version)
432 432 pull_request_obj = pull_request_ver
433 433 _org_pull_request_obj = pull_request_ver.pull_request
434 434 at_version = pull_request_ver.pull_request_version_id
435 435 else:
436 436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
437 437 pull_request_id)
438 438
439 439 pull_request_display_obj = PullRequest.get_pr_display_object(
440 440 pull_request_obj, _org_pull_request_obj)
441 441
442 442 return _org_pull_request_obj, pull_request_obj, \
443 443 pull_request_display_obj, at_version
444 444
445 445 def create(self, created_by, source_repo, source_ref, target_repo,
446 446 target_ref, revisions, reviewers, title, description=None,
447 447 reviewer_data=None, translator=None):
448 448 translator = translator or get_current_request().translate
449 449
450 450 created_by_user = self._get_user(created_by)
451 451 source_repo = self._get_repo(source_repo)
452 452 target_repo = self._get_repo(target_repo)
453 453
454 454 pull_request = PullRequest()
455 455 pull_request.source_repo = source_repo
456 456 pull_request.source_ref = source_ref
457 457 pull_request.target_repo = target_repo
458 458 pull_request.target_ref = target_ref
459 459 pull_request.revisions = revisions
460 460 pull_request.title = title
461 461 pull_request.description = description
462 462 pull_request.author = created_by_user
463 463 pull_request.reviewer_data = reviewer_data
464 464
465 465 Session().add(pull_request)
466 466 Session().flush()
467 467
468 468 reviewer_ids = set()
469 469 # members / reviewers
470 470 for reviewer_object in reviewers:
471 user_id, reasons, mandatory = reviewer_object
471 user_id, reasons, mandatory, rules = reviewer_object
472 472 user = self._get_user(user_id)
473 473
474 474 # skip duplicates
475 475 if user.user_id in reviewer_ids:
476 476 continue
477 477
478 478 reviewer_ids.add(user.user_id)
479 479
480 480 reviewer = PullRequestReviewers()
481 481 reviewer.user = user
482 482 reviewer.pull_request = pull_request
483 483 reviewer.reasons = reasons
484 484 reviewer.mandatory = mandatory
485
486 # NOTE(marcink): pick only first rule for now
487 rule_id = rules[0] if rules else None
488 rule = RepoReviewRule.get(rule_id) if rule_id else None
489 if rule:
490 review_group = rule.user_group_vote_rule()
491 if review_group:
492 # NOTE(marcink):
493 # again, can be that user is member of more,
494 # but we pick the first same, as default reviewers algo
495 review_group = review_group[0]
496
497 rule_data = {
498 'rule_name':
499 rule.review_rule_name,
500 'rule_user_group_entry_id':
501 review_group.repo_review_rule_users_group_id,
502 'rule_user_group_name':
503 review_group.users_group.users_group_name,
504 'rule_user_group_members':
505 [x.user.username for x in review_group.users_group.members],
506 }
507 # e.g {'vote_rule': -1, 'mandatory': True}
508 rule_data.update(review_group.rule_data())
509
510 reviewer.rule_data = rule_data
511
485 512 Session().add(reviewer)
486 513
487 514 # Set approval status to "Under Review" for all commits which are
488 515 # part of this pull request.
489 516 ChangesetStatusModel().set_status(
490 517 repo=target_repo,
491 518 status=ChangesetStatus.STATUS_UNDER_REVIEW,
492 519 user=created_by_user,
493 520 pull_request=pull_request
494 521 )
495 522
496 523 MergeCheck.validate(
497 524 pull_request, user=created_by_user, translator=translator)
498 525
499 526 self.notify_reviewers(pull_request, reviewer_ids)
500 527 self._trigger_pull_request_hook(
501 528 pull_request, created_by_user, 'create')
502 529
503 530 creation_data = pull_request.get_api_data(with_merge_state=False)
504 531 self._log_audit_action(
505 532 'repo.pull_request.create', {'data': creation_data},
506 533 created_by_user, pull_request)
507 534
508 535 return pull_request
509 536
510 537 def _trigger_pull_request_hook(self, pull_request, user, action):
511 538 pull_request = self.__get_pull_request(pull_request)
512 539 target_scm = pull_request.target_repo.scm_instance()
513 540 if action == 'create':
514 541 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
515 542 elif action == 'merge':
516 543 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
517 544 elif action == 'close':
518 545 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
519 546 elif action == 'review_status_change':
520 547 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
521 548 elif action == 'update':
522 549 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
523 550 else:
524 551 return
525 552
526 553 trigger_hook(
527 554 username=user.username,
528 555 repo_name=pull_request.target_repo.repo_name,
529 556 repo_alias=target_scm.alias,
530 557 pull_request=pull_request)
531 558
532 559 def _get_commit_ids(self, pull_request):
533 560 """
534 561 Return the commit ids of the merged pull request.
535 562
536 563 This method is not dealing correctly yet with the lack of autoupdates
537 564 nor with the implicit target updates.
538 565 For example: if a commit in the source repo is already in the target it
539 566 will be reported anyways.
540 567 """
541 568 merge_rev = pull_request.merge_rev
542 569 if merge_rev is None:
543 570 raise ValueError('This pull request was not merged yet')
544 571
545 572 commit_ids = list(pull_request.revisions)
546 573 if merge_rev not in commit_ids:
547 574 commit_ids.append(merge_rev)
548 575
549 576 return commit_ids
550 577
551 578 def merge(self, pull_request, user, extras):
552 579 log.debug("Merging pull request %s", pull_request.pull_request_id)
553 580 merge_state = self._merge_pull_request(pull_request, user, extras)
554 581 if merge_state.executed:
555 582 log.debug(
556 583 "Merge was successful, updating the pull request comments.")
557 584 self._comment_and_close_pr(pull_request, user, merge_state)
558 585
559 586 self._log_audit_action(
560 587 'repo.pull_request.merge',
561 588 {'merge_state': merge_state.__dict__},
562 589 user, pull_request)
563 590
564 591 else:
565 592 log.warn("Merge failed, not updating the pull request.")
566 593 return merge_state
567 594
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Execute the actual merge of `pull_request` into its target repo.

        Refreshes the target reference first, then runs the VCS merge inside
        a callback-daemon context so hooks fired by the merge receive the
        RC_SCM_DATA extras.

        :param pull_request: pull request instance to merge
        :param user: user whose name/email is recorded on the merge commit
        :param extras: hook extras dict, handed to the callback daemon
        :param merge_msg: optional custom merge commit message
        :return: merge state object returned by the vcs backend's merge()
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()
        # operate on the current tip of the target ref, not a stale commit id
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        message = merge_msg or (
            'Merge pull request #%(pr_id)s from '
            '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
            'pr_id': pull_request.pull_request_id,
            'source_repo': source_vcs.name,
            'source_ref_name': pull_request.source_ref_parts.name,
            'pr_title': pull_request.title
        }

        workspace_id = self._workspace_id(pull_request)
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
            merge_state = target_vcs.merge(
                target_ref, source_vcs, pull_request.source_ref_parts,
                workspace_id, user_name=user.username,
                user_email=user.email, message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
602 629
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge revision on the pull
        request, add a closing comment, invalidate the target repo caches
        and fire the 'merge' hook.

        :param merge_state: merge response; its merge_ref commit id is
            stored as the pull request's merge_rev
        :param close_msg: optional custom closing comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # the closing comment (closing_pr=True) marks the PR as closed
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self._trigger_pull_request_hook(pull_request, user, 'merge')
624 651
625 652 def has_valid_update_type(self, pull_request):
626 653 source_ref_type = pull_request.source_ref_parts.type
627 654 return source_ref_type in ['book', 'branch', 'tag']
628 655
629 656 def update_commits(self, pull_request):
630 657 """
631 658 Get the updated list of commits for the pull request
632 659 and return the new pull request version and the list
633 660 of commits processed by this update action
634 661 """
635 662 pull_request = self.__get_pull_request(pull_request)
636 663 source_ref_type = pull_request.source_ref_parts.type
637 664 source_ref_name = pull_request.source_ref_parts.name
638 665 source_ref_id = pull_request.source_ref_parts.commit_id
639 666
640 667 target_ref_type = pull_request.target_ref_parts.type
641 668 target_ref_name = pull_request.target_ref_parts.name
642 669 target_ref_id = pull_request.target_ref_parts.commit_id
643 670
644 671 if not self.has_valid_update_type(pull_request):
645 672 log.debug(
646 673 "Skipping update of pull request %s due to ref type: %s",
647 674 pull_request, source_ref_type)
648 675 return UpdateResponse(
649 676 executed=False,
650 677 reason=UpdateFailureReason.WRONG_REF_TYPE,
651 678 old=pull_request, new=None, changes=None,
652 679 source_changed=False, target_changed=False)
653 680
654 681 # source repo
655 682 source_repo = pull_request.source_repo.scm_instance()
656 683 try:
657 684 source_commit = source_repo.get_commit(commit_id=source_ref_name)
658 685 except CommitDoesNotExistError:
659 686 return UpdateResponse(
660 687 executed=False,
661 688 reason=UpdateFailureReason.MISSING_SOURCE_REF,
662 689 old=pull_request, new=None, changes=None,
663 690 source_changed=False, target_changed=False)
664 691
665 692 source_changed = source_ref_id != source_commit.raw_id
666 693
667 694 # target repo
668 695 target_repo = pull_request.target_repo.scm_instance()
669 696 try:
670 697 target_commit = target_repo.get_commit(commit_id=target_ref_name)
671 698 except CommitDoesNotExistError:
672 699 return UpdateResponse(
673 700 executed=False,
674 701 reason=UpdateFailureReason.MISSING_TARGET_REF,
675 702 old=pull_request, new=None, changes=None,
676 703 source_changed=False, target_changed=False)
677 704 target_changed = target_ref_id != target_commit.raw_id
678 705
679 706 if not (source_changed or target_changed):
680 707 log.debug("Nothing changed in pull request %s", pull_request)
681 708 return UpdateResponse(
682 709 executed=False,
683 710 reason=UpdateFailureReason.NO_CHANGE,
684 711 old=pull_request, new=None, changes=None,
685 712 source_changed=target_changed, target_changed=source_changed)
686 713
687 714 change_in_found = 'target repo' if target_changed else 'source repo'
688 715 log.debug('Updating pull request because of change in %s detected',
689 716 change_in_found)
690 717
691 718 # Finally there is a need for an update, in case of source change
692 719 # we create a new version, else just an update
693 720 if source_changed:
694 721 pull_request_version = self._create_version_from_snapshot(pull_request)
695 722 self._link_comments_to_version(pull_request_version)
696 723 else:
697 724 try:
698 725 ver = pull_request.versions[-1]
699 726 except IndexError:
700 727 ver = None
701 728
702 729 pull_request.pull_request_version_id = \
703 730 ver.pull_request_version_id if ver else None
704 731 pull_request_version = pull_request
705 732
706 733 try:
707 734 if target_ref_type in ('tag', 'branch', 'book'):
708 735 target_commit = target_repo.get_commit(target_ref_name)
709 736 else:
710 737 target_commit = target_repo.get_commit(target_ref_id)
711 738 except CommitDoesNotExistError:
712 739 return UpdateResponse(
713 740 executed=False,
714 741 reason=UpdateFailureReason.MISSING_TARGET_REF,
715 742 old=pull_request, new=None, changes=None,
716 743 source_changed=source_changed, target_changed=target_changed)
717 744
718 745 # re-compute commit ids
719 746 old_commit_ids = pull_request.revisions
720 747 pre_load = ["author", "branch", "date", "message"]
721 748 commit_ranges = target_repo.compare(
722 749 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
723 750 pre_load=pre_load)
724 751
725 752 ancestor = target_repo.get_common_ancestor(
726 753 target_commit.raw_id, source_commit.raw_id, source_repo)
727 754
728 755 pull_request.source_ref = '%s:%s:%s' % (
729 756 source_ref_type, source_ref_name, source_commit.raw_id)
730 757 pull_request.target_ref = '%s:%s:%s' % (
731 758 target_ref_type, target_ref_name, ancestor)
732 759
733 760 pull_request.revisions = [
734 761 commit.raw_id for commit in reversed(commit_ranges)]
735 762 pull_request.updated_on = datetime.datetime.now()
736 763 Session().add(pull_request)
737 764 new_commit_ids = pull_request.revisions
738 765
739 766 old_diff_data, new_diff_data = self._generate_update_diffs(
740 767 pull_request, pull_request_version)
741 768
742 769 # calculate commit and file changes
743 770 changes = self._calculate_commit_id_changes(
744 771 old_commit_ids, new_commit_ids)
745 772 file_changes = self._calculate_file_changes(
746 773 old_diff_data, new_diff_data)
747 774
748 775 # set comments as outdated if DIFFS changed
749 776 CommentsModel().outdate_comments(
750 777 pull_request, old_diff_data=old_diff_data,
751 778 new_diff_data=new_diff_data)
752 779
753 780 commit_changes = (changes.added or changes.removed)
754 781 file_node_changes = (
755 782 file_changes.added or file_changes.modified or file_changes.removed)
756 783 pr_has_changes = commit_changes or file_node_changes
757 784
758 785 # Add an automatic comment to the pull request, in case
759 786 # anything has changed
760 787 if pr_has_changes:
761 788 update_comment = CommentsModel().create(
762 789 text=self._render_update_message(changes, file_changes),
763 790 repo=pull_request.target_repo,
764 791 user=pull_request.author,
765 792 pull_request=pull_request,
766 793 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
767 794
768 795 # Update status to "Under Review" for added commits
769 796 for commit_id in changes.added:
770 797 ChangesetStatusModel().set_status(
771 798 repo=pull_request.source_repo,
772 799 status=ChangesetStatus.STATUS_UNDER_REVIEW,
773 800 comment=update_comment,
774 801 user=pull_request.author,
775 802 pull_request=pull_request,
776 803 revision=commit_id)
777 804
778 805 log.debug(
779 806 'Updated pull request %s, added_ids: %s, common_ids: %s, '
780 807 'removed_ids: %s', pull_request.pull_request_id,
781 808 changes.added, changes.common, changes.removed)
782 809 log.debug(
783 810 'Updated pull request with the following file changes: %s',
784 811 file_changes)
785 812
786 813 log.info(
787 814 "Updated pull request %s from commit %s to commit %s, "
788 815 "stored new version %s of this pull request.",
789 816 pull_request.pull_request_id, source_ref_id,
790 817 pull_request.source_ref_parts.commit_id,
791 818 pull_request_version.pull_request_version_id)
792 819 Session().commit()
793 820 self._trigger_pull_request_hook(
794 821 pull_request, pull_request.author, 'update')
795 822
796 823 return UpdateResponse(
797 824 executed=True, reason=UpdateFailureReason.NONE,
798 825 old=pull_request, new=pull_request_version, changes=changes,
799 826 source_changed=source_changed, target_changed=target_changed)
800 827
801 828 def _create_version_from_snapshot(self, pull_request):
802 829 version = PullRequestVersion()
803 830 version.title = pull_request.title
804 831 version.description = pull_request.description
805 832 version.status = pull_request.status
806 833 version.created_on = datetime.datetime.now()
807 834 version.updated_on = pull_request.updated_on
808 835 version.user_id = pull_request.user_id
809 836 version.source_repo = pull_request.source_repo
810 837 version.source_ref = pull_request.source_ref
811 838 version.target_repo = pull_request.target_repo
812 839 version.target_ref = pull_request.target_ref
813 840
814 841 version._last_merge_source_rev = pull_request._last_merge_source_rev
815 842 version._last_merge_target_rev = pull_request._last_merge_target_rev
816 843 version.last_merge_status = pull_request.last_merge_status
817 844 version.shadow_merge_ref = pull_request.shadow_merge_ref
818 845 version.merge_rev = pull_request.merge_rev
819 846 version.reviewer_data = pull_request.reviewer_data
820 847
821 848 version.revisions = pull_request.revisions
822 849 version.pull_request = pull_request
823 850 Session().add(version)
824 851 Session().flush()
825 852
826 853 return version
827 854
828 855 def _generate_update_diffs(self, pull_request, pull_request_version):
829 856
830 857 diff_context = (
831 858 self.DIFF_CONTEXT +
832 859 CommentsModel.needed_extra_diff_context())
833 860
834 861 source_repo = pull_request_version.source_repo
835 862 source_ref_id = pull_request_version.source_ref_parts.commit_id
836 863 target_ref_id = pull_request_version.target_ref_parts.commit_id
837 864 old_diff = self._get_diff_from_pr_or_version(
838 865 source_repo, source_ref_id, target_ref_id, context=diff_context)
839 866
840 867 source_repo = pull_request.source_repo
841 868 source_ref_id = pull_request.source_ref_parts.commit_id
842 869 target_ref_id = pull_request.target_ref_parts.commit_id
843 870
844 871 new_diff = self._get_diff_from_pr_or_version(
845 872 source_repo, source_ref_id, target_ref_id, context=diff_context)
846 873
847 874 old_diff_data = diffs.DiffProcessor(old_diff)
848 875 old_diff_data.prepare()
849 876 new_diff_data = diffs.DiffProcessor(new_diff)
850 877 new_diff_data.prepare()
851 878
852 879 return old_diff_data, new_diff_data
853 880
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        Only comments whose `pull_request_version` is still NULL are
        touched, so comments already attached to an older version keep
        their association.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
879 906
880 907 def _calculate_commit_id_changes(self, old_ids, new_ids):
881 908 added = [x for x in new_ids if x not in old_ids]
882 909 common = [x for x in new_ids if x in old_ids]
883 910 removed = [x for x in old_ids if x not in new_ids]
884 911 total = new_ids
885 912 return ChangeTuple(added, common, removed, total)
886 913
887 914 def _calculate_file_changes(self, old_diff_data, new_diff_data):
888 915
889 916 old_files = OrderedDict()
890 917 for diff_data in old_diff_data.parsed_diff:
891 918 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
892 919
893 920 added_files = []
894 921 modified_files = []
895 922 removed_files = []
896 923 for diff_data in new_diff_data.parsed_diff:
897 924 new_filename = diff_data['filename']
898 925 new_hash = md5_safe(diff_data['raw_diff'])
899 926
900 927 old_hash = old_files.get(new_filename)
901 928 if not old_hash:
902 929 # file is not present in old diff, means it's added
903 930 added_files.append(new_filename)
904 931 else:
905 932 if new_hash != old_hash:
906 933 modified_files.append(new_filename)
907 934 # now remove a file from old, since we have seen it already
908 935 del old_files[new_filename]
909 936
910 937 # removed files is when there are present in old, but not in NEW,
911 938 # since we remove old files that are present in new diff, left-overs
912 939 # if any should be the removed files
913 940 removed_files.extend(old_files.keys())
914 941
915 942 return FileChangeTuple(added_files, modified_files, removed_files)
916 943
917 944 def _render_update_message(self, changes, file_changes):
918 945 """
919 946 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
920 947 so it's always looking the same disregarding on which default
921 948 renderer system is using.
922 949
923 950 :param changes: changes named tuple
924 951 :param file_changes: file changes named tuple
925 952
926 953 """
927 954 new_status = ChangesetStatus.get_status_lbl(
928 955 ChangesetStatus.STATUS_UNDER_REVIEW)
929 956
930 957 changed_files = (
931 958 file_changes.added + file_changes.modified + file_changes.removed)
932 959
933 960 params = {
934 961 'under_review_label': new_status,
935 962 'added_commits': changes.added,
936 963 'removed_commits': changes.removed,
937 964 'changed_files': changed_files,
938 965 'added_files': file_changes.added,
939 966 'modified_files': file_changes.modified,
940 967 'removed_files': file_changes.removed,
941 968 }
942 969 renderer = RstTemplateRenderer()
943 970 return renderer.render('pull_request_update.mako', **params)
944 971
    def edit(self, pull_request, title, description, user):
        """
        Edit title and description of an open pull request, audit-logging
        the previous state.

        :param title: new title; falsy values leave the title untouched
        :param description: new description (always overwritten)
        :raises ValueError: if the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        # snapshot state before modification for the audit entry
        old_data = pull_request.get_api_data(with_merge_state=False)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._log_audit_action(
            'repo.pull_request.edit', {'old_data': old_data},
            user, pull_request)
958 985
959 986 def update_reviewers(self, pull_request, reviewer_data, user):
960 987 """
961 988 Update the reviewers in the pull request
962 989
963 990 :param pull_request: the pr to update
964 991 :param reviewer_data: list of tuples
965 [(user, ['reason1', 'reason2'], mandatory_flag)]
992 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
966 993 """
967 994 pull_request = self.__get_pull_request(pull_request)
968 995 if pull_request.is_closed():
969 996 raise ValueError('This pull request is closed')
970 997
971 998 reviewers = {}
972 for user_id, reasons, mandatory in reviewer_data:
999 for user_id, reasons, mandatory, rules in reviewer_data:
973 1000 if isinstance(user_id, (int, basestring)):
974 1001 user_id = self._get_user(user_id).user_id
975 1002 reviewers[user_id] = {
976 1003 'reasons': reasons, 'mandatory': mandatory}
977 1004
978 1005 reviewers_ids = set(reviewers.keys())
979 1006 current_reviewers = PullRequestReviewers.query()\
980 1007 .filter(PullRequestReviewers.pull_request ==
981 1008 pull_request).all()
982 1009 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
983 1010
984 1011 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
985 1012 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
986 1013
987 1014 log.debug("Adding %s reviewers", ids_to_add)
988 1015 log.debug("Removing %s reviewers", ids_to_remove)
989 1016 changed = False
990 1017 for uid in ids_to_add:
991 1018 changed = True
992 1019 _usr = self._get_user(uid)
993 1020 reviewer = PullRequestReviewers()
994 1021 reviewer.user = _usr
995 1022 reviewer.pull_request = pull_request
996 1023 reviewer.reasons = reviewers[uid]['reasons']
997 1024 # NOTE(marcink): mandatory shouldn't be changed now
998 1025 # reviewer.mandatory = reviewers[uid]['reasons']
999 1026 Session().add(reviewer)
1000 1027 self._log_audit_action(
1001 1028 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1002 1029 user, pull_request)
1003 1030
1004 1031 for uid in ids_to_remove:
1005 1032 changed = True
1006 1033 reviewers = PullRequestReviewers.query()\
1007 1034 .filter(PullRequestReviewers.user_id == uid,
1008 1035 PullRequestReviewers.pull_request == pull_request)\
1009 1036 .all()
1010 1037 # use .all() in case we accidentally added the same person twice
1011 1038 # this CAN happen due to the lack of DB checks
1012 1039 for obj in reviewers:
1013 1040 old_data = obj.get_dict()
1014 1041 Session().delete(obj)
1015 1042 self._log_audit_action(
1016 1043 'repo.pull_request.reviewer.delete',
1017 1044 {'old_data': old_data}, user, pull_request)
1018 1045
1019 1046 if changed:
1020 1047 pull_request.updated_on = datetime.datetime.now()
1021 1048 Session().add(pull_request)
1022 1049
1023 1050 self.notify_reviewers(pull_request, ids_to_add)
1024 1051 return ids_to_add, ids_to_remove
1025 1052
1026 1053 def get_url(self, pull_request, request=None, permalink=False):
1027 1054 if not request:
1028 1055 request = get_current_request()
1029 1056
1030 1057 if permalink:
1031 1058 return request.route_url(
1032 1059 'pull_requests_global',
1033 1060 pull_request_id=pull_request.pull_request_id,)
1034 1061 else:
1035 1062 return request.route_url('pullrequest_show',
1036 1063 repo_name=safe_str(pull_request.target_repo.repo_name),
1037 1064 pull_request_id=pull_request.pull_request_id,)
1038 1065
1039 1066 def get_shadow_clone_url(self, pull_request):
1040 1067 """
1041 1068 Returns qualified url pointing to the shadow repository. If this pull
1042 1069 request is closed there is no shadow repository and ``None`` will be
1043 1070 returned.
1044 1071 """
1045 1072 if pull_request.is_closed():
1046 1073 return None
1047 1074 else:
1048 1075 pr_url = urllib.unquote(self.get_url(pull_request))
1049 1076 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1050 1077
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications and emails for the given reviewer user
        ids about this pull request. A no-op when `reviewers_ids` is empty.

        :param pull_request: the pull request being reviewed
        :param reviewers_ids: iterable of user ids to notify
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        # kwargs consumed by the email template renderer below
        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1109 1136
    def delete(self, pull_request, user):
        """
        Delete a pull request: clean up its merge workspace, write an audit
        entry with the pre-delete state, then remove the DB record.

        :param pull_request: pull request (or its id) to delete
        :param user: user performing the deletion, for the audit log
        """
        pull_request = self.__get_pull_request(pull_request)
        # capture state before deletion for the audit entry
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1118 1145
    def close_pull_request(self, pull_request, user):
        """
        Mark the pull request as closed, clean up its merge workspace, fire
        the 'close' hook and audit-log the final state.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1131 1158
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None):
        """
        Close a pull request with a status-changing comment.

        The final status is APPROVED only when the calculated review status
        is approved; anything else records the close as REJECTED. If the
        close changed the calculated review status, the
        'review_status_change' hook is fired before the pull request is
        finally closed.

        :param message: optional comment text; a default closing message
            is generated when not given
        :return: tuple of (comment, status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self._trigger_pull_request_hook(
                pull_request, user, 'review_status_change')

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1186 1213
1187 1214 def merge_status(self, pull_request, translator=None):
1188 1215 _ = translator or get_current_request().translate
1189 1216
1190 1217 if not self._is_merge_enabled(pull_request):
1191 1218 return False, _('Server-side pull request merging is disabled.')
1192 1219 if pull_request.is_closed():
1193 1220 return False, _('This pull request is closed.')
1194 1221 merge_possible, msg = self._check_repo_requirements(
1195 1222 target=pull_request.target_repo, source=pull_request.source_repo,
1196 1223 translator=_)
1197 1224 if not merge_possible:
1198 1225 return merge_possible, msg
1199 1226
1200 1227 try:
1201 1228 resp = self._try_merge(pull_request)
1202 1229 log.debug("Merge response: %s", resp)
1203 1230 status = resp.possible, self.merge_status_message(
1204 1231 resp.failure_reason)
1205 1232 except NotImplementedError:
1206 1233 status = False, _('Pull request merging is not supported.')
1207 1234
1208 1235 return status
1209 1236
1210 1237 def _check_repo_requirements(self, target, source, translator):
1211 1238 """
1212 1239 Check if `target` and `source` have compatible requirements.
1213 1240
1214 1241 Currently this is just checking for largefiles.
1215 1242 """
1216 1243 _ = translator
1217 1244 target_has_largefiles = self._has_largefiles(target)
1218 1245 source_has_largefiles = self._has_largefiles(source)
1219 1246 merge_possible = True
1220 1247 message = u''
1221 1248
1222 1249 if target_has_largefiles != source_has_largefiles:
1223 1250 merge_possible = False
1224 1251 if source_has_largefiles:
1225 1252 message = _(
1226 1253 'Target repository large files support is disabled.')
1227 1254 else:
1228 1255 message = _(
1229 1256 'Source repository large files support is disabled.')
1230 1257
1231 1258 return merge_possible, message
1232 1259
1233 1260 def _has_largefiles(self, repo):
1234 1261 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1235 1262 'extensions', 'largefiles')
1236 1263 return largefiles_ui and largefiles_ui[0].active
1237 1264
    def _try_merge(self, pull_request):
        """
        Try to merge the pull request and return the merge status.

        Uses a cached merge state when it is still valid (see
        _needs_merge_state_refresh); a locked target repo or a missing
        target ref short-circuit with the matching failure reason.
        """
        log.debug(
            "Trying out if the pull request %s can be merged.",
            pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()

        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif self._needs_merge_state_refresh(pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached state still valid; rebuild the response from it
            possible = pull_request.\
                last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status)

        return merge_state
1272 1299
1273 1300 def _refresh_reference(self, reference, vcs_repository):
1274 1301 if reference.type in ('branch', 'book'):
1275 1302 name_or_id = reference.name
1276 1303 else:
1277 1304 name_or_id = reference.commit_id
1278 1305 refreshed_commit = vcs_repository.get_commit(name_or_id)
1279 1306 refreshed_reference = Reference(
1280 1307 reference.type, reference.name, refreshed_commit.raw_id)
1281 1308 return refreshed_reference
1282 1309
1283 1310 def _needs_merge_state_refresh(self, pull_request, target_reference):
1284 1311 return not(
1285 1312 pull_request.revisions and
1286 1313 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1287 1314 target_reference.commit_id == pull_request._last_merge_target_rev)
1288 1315
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge and cache its outcome on the pull request
        (last merge source/target revs, status and shadow merge ref).

        :return: the dry-run merge state object from the vcs backend
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            target_reference, source_vcs, pull_request.source_ref_parts,
            workspace_id, dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1310 1337
1311 1338 def _workspace_id(self, pull_request):
1312 1339 workspace_id = 'pr-%s' % pull_request.pull_request_id
1313 1340 return workspace_id
1314 1341
    def merge_status_message(self, status_code):
        """
        Return a human friendly error message for the given merge status code.

        Looks the code up in ``self.MERGE_STATUS_MESSAGES`` (defined
        elsewhere on the class); an unknown code raises a lookup error —
        presumably KeyError, assuming a plain dict mapping.
        """
        return self.MERGE_STATUS_MESSAGES[status_code]
1320 1347
1321 1348 def generate_repo_data(self, repo, commit_id=None, branch=None,
1322 1349 bookmark=None, translator=None):
1323 1350 from rhodecode.model.repo import RepoModel
1324 1351
1325 1352 all_refs, selected_ref = \
1326 1353 self._get_repo_pullrequest_sources(
1327 1354 repo.scm_instance(), commit_id=commit_id,
1328 1355 branch=branch, bookmark=bookmark, translator=translator)
1329 1356
1330 1357 refs_select2 = []
1331 1358 for element in all_refs:
1332 1359 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1333 1360 refs_select2.append({'text': element[1], 'children': children})
1334 1361
1335 1362 return {
1336 1363 'user': {
1337 1364 'user_id': repo.user.user_id,
1338 1365 'username': repo.user.username,
1339 1366 'firstname': repo.user.first_name,
1340 1367 'lastname': repo.user.last_name,
1341 1368 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1342 1369 },
1343 1370 'name': repo.repo_name,
1344 1371 'link': RepoModel().get_url(repo),
1345 1372 'description': h.chop_at_smart(repo.description_safe, '\n'),
1346 1373 'refs': {
1347 1374 'all_refs': all_refs,
1348 1375 'selected_ref': selected_ref,
1349 1376 'select2_refs': refs_select2
1350 1377 }
1351 1378 }
1352 1379
1353 1380 def generate_pullrequest_title(self, source, source_ref, target):
1354 1381 return u'{source}#{at_ref} to {target}'.format(
1355 1382 source=source,
1356 1383 at_ref=source_ref,
1357 1384 target=target,
1358 1385 )
1359 1386
1360 1387 def _cleanup_merge_workspace(self, pull_request):
1361 1388 # Merging related cleanup
1362 1389 target_scm = pull_request.target_repo.scm_instance()
1363 1390 workspace_id = 'pr-%s' % pull_request.pull_request_id
1364 1391
1365 1392 try:
1366 1393 target_scm.cleanup_merge_workspace(workspace_id)
1367 1394 except NotImplementedError:
1368 1395 pass
1369 1396
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :param translator: optional translation function; defaults to the
            current request translator
        :returns: tuple of (grouped refs, selected ref key or None)
        :raises CommitDoesNotExistError: when a requested ref cannot be found
        :raises EmptyRepositoryError: when the repo has no commits at all
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # ref_key format: '<type>:<name>:<commit id>'
                ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # select this ref if either the requested commit or the
                    # requested name matches it
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    'No commit refs could be found matching: %s' % ref)
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # nothing requested: fall back to the default branch
                selected = 'branch:%s:%s' % (
                    repo.DEFAULT_BRANCH_NAME,
                    repo.branches[repo.DEFAULT_BRANCH_NAME]
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1434 1461
    def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
        """Return the raw vcs diff between two refs of *source_repo*."""
        return self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=context)
1438 1465
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id, context):
        """
        Compute the diff between two refs of *source_repo*.

        :param source_repo: a `Repository` db object or a vcs repo instance
        :param source_ref_id: commit id of the "new" side
        :param target_ref_id: commit id of the "old" side
        :param context: number of context lines around each hunk
        """
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id))
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff
1466 1493
    def _is_merge_enabled(self, pull_request):
        """True when merging via the web UI is enabled for the target repo."""
        return self._get_general_setting(
            pull_request, 'rhodecode_pr_merge_enabled')
1470 1497
1471 1498 def _use_rebase_for_merging(self, pull_request):
1472 1499 repo_type = pull_request.target_repo.repo_type
1473 1500 if repo_type == 'hg':
1474 1501 return self._get_general_setting(
1475 1502 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1476 1503 elif repo_type == 'git':
1477 1504 return self._get_general_setting(
1478 1505 pull_request, 'rhodecode_git_use_rebase_for_merging')
1479 1506
1480 1507 return False
1481 1508
1482 1509 def _close_branch_before_merging(self, pull_request):
1483 1510 repo_type = pull_request.target_repo.repo_type
1484 1511 if repo_type == 'hg':
1485 1512 return self._get_general_setting(
1486 1513 pull_request, 'rhodecode_hg_close_branch_before_merging')
1487 1514 elif repo_type == 'git':
1488 1515 return self._get_general_setting(
1489 1516 pull_request, 'rhodecode_git_close_branch_before_merging')
1490 1517
1491 1518 return False
1492 1519
    def _get_general_setting(self, pull_request, settings_key, default=False):
        """Read *settings_key* from the target repo's general vcs settings."""
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)
1497 1524
    def _log_audit_action(self, action, action_data, user, pull_request):
        """Store an audit-log entry scoped to the pull request's target repo."""
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)
1504 1531
    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, else we fallback to CE
        package functions

        :returns: tuple of (get_default_reviewers_data, validate_default_reviewers)
        """
        try:
            # EE-only package; present only in Enterprise Edition installs
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
        except ImportError:
            from rhodecode.apps.repository.utils import \
                get_default_reviewers_data
            from rhodecode.apps.repository.utils import \
                validate_default_reviewers

        return get_default_reviewers_data, validate_default_reviewers
1521 1548
1522 1549
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys identifying the individual checks in `error_details`
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        # review status of the pull request; filled in by validate()
        self.review_status = None
        # whether the vcs backend reports the merge as technically possible
        self.merge_possible = None
        self.merge_msg = ''
        # None means "no check failed yet"; True once any error is pushed
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """
        Record a failed check.

        :param error_type: severity, 'error' or 'warning'
        :param message: human readable (translated) message
        :param error_key: one of the *_CHECK class constants
        :param details: arbitrary payload describing the failure
        """
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, user, translator, fail_early=False):
        """
        Run all merge checks for *pull_request* as *user*.

        :param translator: translation function for user-facing messages
        :param fail_early: return right after the first failed check instead
            of collecting all failures
        """
        _ = translator
        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, user)
        if not user_allowed_to_merge:
            # BUGFIX: this branch used to log the approval-pending message,
            # copy-pasted from the review-status check below
            log.debug("MergeCheck: cannot merge, no merge permission.")

            msg = _('User `{}` not allowed to perform merge.').format(user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error(
                'warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved todos left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, i.e. whether the backend can perform the merge
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug(
                "MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """Describe how the merge will be performed (strategy, branch close)."""
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = None
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            # BUGFIX: an unknown repo_type previously raised NameError on
            # close_msg; now the entry is simply omitted
            if close_msg:
                merge_details['close_branch'] = dict(
                    details={},
                    message=close_msg
                )

        return merge_details
1649 1676
# Lightweight result records for commit/file change statistics.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', 'added common removed total')

FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', 'added modified removed')
@@ -1,913 +1,914 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 users model for RhodeCode
23 23 """
24 24
25 25 import logging
26 26 import traceback
27 27 import datetime
28 28 import ipaddress
29 29
30 30 from pyramid.threadlocal import get_current_request
31 31 from sqlalchemy.exc import DatabaseError
32 32
33 33 from rhodecode import events
34 34 from rhodecode.lib.user_log_filter import user_log_filter
35 35 from rhodecode.lib.utils2 import (
36 36 safe_unicode, get_current_rhodecode_user, action_logger_generic,
37 37 AttributeDict, str2bool)
38 38 from rhodecode.lib.exceptions import (
39 39 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
40 40 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
41 41 from rhodecode.lib.caching_query import FromCache
42 42 from rhodecode.model import BaseModel
43 43 from rhodecode.model.auth_token import AuthTokenModel
44 44 from rhodecode.model.db import (
45 45 _hash_key, true, false, or_, joinedload, User, UserToPerm,
46 46 UserEmailMap, UserIpMap, UserLog)
47 47 from rhodecode.model.meta import Session
48 48 from rhodecode.model.repo_group import RepoGroupModel
49 49
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class UserModel(BaseModel):
55 55 cls = User
56 56
    def get(self, user_id, cache=False):
        """
        Fetch a user by primary key.

        :param user_id: database id of the user
        :param cache: when True, use the short-lived SQL cache region
        """
        user = self.sa.query(User)
        if cache:
            user = user.options(
                FromCache("sql_cache_short", "get_user_%s" % user_id))
        return user.get(user_id)
63 63
    def get_user(self, user):
        """Resolve *user* via ``BaseModel._get_user`` to a `User` instance."""
        return self._get_user(user)
66 66
    def _serialize_user(self, user):
        """Return a small dict representation of *user* for select widgets."""
        import rhodecode.lib.helpers as h

        return {
            'id': user.user_id,
            'first_name': user.first_name,
            'last_name': user.last_name,
            'username': user.username,
            'email': user.email,
            'icon_link': h.gravatar_url(user.email, 30),
            'profile_link': h.link_to_user(user),
            # escaped display name for safe HTML rendering
            'value_display': h.escape(h.person(user)),
            'value': user.username,
            'value_type': 'user',
            'active': user.active,
        }
82 83
83 84 def get_users(self, name_contains=None, limit=20, only_active=True):
84 85
85 86 query = self.sa.query(User)
86 87 if only_active:
87 88 query = query.filter(User.active == true())
88 89
89 90 if name_contains:
90 91 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
91 92 query = query.filter(
92 93 or_(
93 94 User.name.ilike(ilike_expression),
94 95 User.lastname.ilike(ilike_expression),
95 96 User.username.ilike(ilike_expression)
96 97 )
97 98 )
98 99 query = query.limit(limit)
99 100 users = query.all()
100 101
101 102 _users = [
102 103 self._serialize_user(user) for user in users
103 104 ]
104 105 return _users
105 106
    def get_by_username(self, username, cache=False, case_insensitive=False):
        """
        Fetch a user by username.

        :param username: username to look up
        :param cache: when True, use the short-lived SQL cache region
        :param case_insensitive: when True, match the username with ILIKE
        """
        if case_insensitive:
            user = self.sa.query(User).filter(User.username.ilike(username))
        else:
            user = self.sa.query(User)\
                .filter(User.username == username)
        if cache:
            name_key = _hash_key(username)
            user = user.options(
                FromCache("sql_cache_short", "get_user_%s" % name_key))
        return user.scalar()
118 119
    def get_by_email(self, email, cache=False, case_insensitive=False):
        """Fetch a user by e-mail address; delegates to `User.get_by_email`."""
        return User.get_by_email(email, case_insensitive, cache)
121 122
    def get_by_auth_token(self, auth_token, cache=False):
        """Fetch a user by auth token; delegates to `User.get_by_auth_token`."""
        return User.get_by_auth_token(auth_token, cache)
124 125
125 126 def get_active_user_count(self, cache=False):
126 127 qry = User.query().filter(
127 128 User.active == true()).filter(
128 129 User.username != User.DEFAULT_USER)
129 130 if cache:
130 131 qry = qry.options(
131 132 FromCache("sql_cache_short", "get_active_users"))
132 133 return qry.count()
133 134
    def create(self, form_data, cur_user=None):
        """
        Create a new (non-admin) user from a validated form dict.

        :param form_data: dict carrying the user fields (username, password,
            email, firstname, lastname, active, extern_type, extern_name, and
            optionally create_repo_group / password_change)
        :param cur_user: username performing this action; defaults to the
            current request user
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)

        user_data = {
            'username': form_data['username'],
            'password': form_data['password'],
            'email': form_data['email'],
            'firstname': form_data['firstname'],
            'lastname': form_data['lastname'],
            'active': form_data['active'],
            'extern_type': form_data['extern_type'],
            'extern_name': form_data['extern_name'],
            'admin': False,
            'cur_user': cur_user
        }

        if 'create_repo_group' in form_data:
            user_data['create_repo_group'] = str2bool(
                form_data.get('create_repo_group'))

        try:
            if form_data.get('password_change'):
                # force password change on first login
                user_data['force_password_change'] = True
            return UserModel().create_or_update(**user_data)
        except Exception:
            log.error(traceback.format_exc())
            raise
162 163
163 164 def update_user(self, user, skip_attrs=None, **kwargs):
164 165 from rhodecode.lib.auth import get_crypt_password
165 166
166 167 user = self._get_user(user)
167 168 if user.username == User.DEFAULT_USER:
168 169 raise DefaultUserException(
169 170 "You can't edit this user (`%(username)s`) since it's "
170 171 "crucial for entire application" % {
171 172 'username': user.username})
172 173
173 174 # first store only defaults
174 175 user_attrs = {
175 176 'updating_user_id': user.user_id,
176 177 'username': user.username,
177 178 'password': user.password,
178 179 'email': user.email,
179 180 'firstname': user.name,
180 181 'lastname': user.lastname,
181 182 'active': user.active,
182 183 'admin': user.admin,
183 184 'extern_name': user.extern_name,
184 185 'extern_type': user.extern_type,
185 186 'language': user.user_data.get('language')
186 187 }
187 188
188 189 # in case there's new_password, that comes from form, use it to
189 190 # store password
190 191 if kwargs.get('new_password'):
191 192 kwargs['password'] = kwargs['new_password']
192 193
193 194 # cleanups, my_account password change form
194 195 kwargs.pop('current_password', None)
195 196 kwargs.pop('new_password', None)
196 197
197 198 # cleanups, user edit password change form
198 199 kwargs.pop('password_confirmation', None)
199 200 kwargs.pop('password_change', None)
200 201
201 202 # create repo group on user creation
202 203 kwargs.pop('create_repo_group', None)
203 204
204 205 # legacy forms send name, which is the firstname
205 206 firstname = kwargs.pop('name', None)
206 207 if firstname:
207 208 kwargs['firstname'] = firstname
208 209
209 210 for k, v in kwargs.items():
210 211 # skip if we don't want to update this
211 212 if skip_attrs and k in skip_attrs:
212 213 continue
213 214
214 215 user_attrs[k] = v
215 216
216 217 try:
217 218 return self.create_or_update(**user_attrs)
218 219 except Exception:
219 220 log.error(traceback.format_exc())
220 221 raise
221 222
    def create_or_update(
            self, username, password, email, firstname='', lastname='',
            active=True, admin=False, extern_type=None, extern_name=None,
            cur_user=None, plugin=None, force_password_change=False,
            allow_to_create_user=True, create_repo_group=None,
            updating_user_id=None, language=None, strict_creation_check=True):
        """
        Creates a new instance if not found, or updates current one

        :param username:
        :param password: plaintext; hashed before storing
        :param email:
        :param firstname:
        :param lastname:
        :param active:
        :param admin:
        :param extern_type: auth plugin type; defaults to built-in rhodecode
        :param extern_name: auth plugin name; defaults to built-in rhodecode
        :param cur_user: user performing the action, used for hooks/audit
        :param plugin: optional plugin this method was called from
        :param force_password_change: toggles new or existing user flag
            for password change
        :param allow_to_create_user: Defines if the method can actually create
            new users
        :param create_repo_group: Defines if the method should also
            create an repo group with user name, and owner
        :param updating_user_id: if we set it up this is the user we want to
            update this allows to editing username.
        :param language: language of user from interface.

        :returns: new User object with injected `is_new_user` attribute.
        """

        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)

        from rhodecode.lib.auth import (
            get_crypt_password, check_password, generate_auth_token)
        from rhodecode.lib.hooks_base import (
            log_create_user, check_allowed_create_user)

        def _password_change(new_user, password):
            # detect whether the supplied plaintext differs from stored hash
            old_password = new_user.password or ''
            # empty password
            if not old_password:
                return False

            # password check is only needed for RhodeCode internal auth calls
            # in case it's a plugin we don't care
            if not plugin:

                # first check if we gave crypted password back, and if it
                # matches it's not password change
                if new_user.password == password:
                    return False

                password_match = check_password(password, old_password)
                if not password_match:
                    return True

            return False

        # read settings on default personal repo group creation
        if create_repo_group is None:
            default_create_repo_group = RepoGroupModel()\
                .get_default_create_personal_repo_group()
            create_repo_group = default_create_repo_group

        user_data = {
            'username': username,
            'password': password,
            'email': email,
            'firstname': firstname,
            'lastname': lastname,
            'active': active,
            'admin': admin
        }

        if updating_user_id:
            log.debug('Checking for existing account in RhodeCode '
                      'database with user_id `%s` ' % (updating_user_id,))
            user = User.get(updating_user_id)
        else:
            log.debug('Checking for existing account in RhodeCode '
                      'database with username `%s` ' % (username,))
            user = User.get_by_username(username, case_insensitive=True)

        if user is None:
            # we check internal flag if this method is actually allowed to
            # create new user
            if not allow_to_create_user:
                msg = ('Method wants to create new user, but it is not '
                       'allowed to do so')
                log.warning(msg)
                raise NotAllowedToCreateUserError(msg)

            log.debug('Creating new user %s', username)

            # only if we create user that is active
            new_active_user = active
            if new_active_user and strict_creation_check:
                # raises UserCreationError if it's not allowed for any reason to
                # create new active user, this also executes pre-create hooks
                check_allowed_create_user(user_data, cur_user, strict_check=True)
            events.trigger(events.UserPreCreate(user_data))
            new_user = User()
            edit = False
        else:
            log.debug('updating user %s', username)
            events.trigger(events.UserPreUpdate(user, user_data))
            new_user = user
            edit = True

            # we're not allowed to edit default user
            if user.username == User.DEFAULT_USER:
                raise DefaultUserException(
                    "You can't edit this user (`%(username)s`) since it's "
                    "crucial for entire application"
                    % {'username': user.username})

        # inject special attribute that will tell us if User is new or old
        new_user.is_new_user = not edit
        # for users that didn't specify auth type, we use RhodeCode built in
        from rhodecode.authentication.plugins import auth_rhodecode
        extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
        extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name

        try:
            new_user.username = username
            new_user.admin = admin
            new_user.email = email
            new_user.active = active
            new_user.extern_name = safe_unicode(extern_name)
            new_user.extern_type = safe_unicode(extern_type)
            new_user.name = firstname
            new_user.lastname = lastname

            # set password only if creating an user or password is changed
            if not edit or _password_change(new_user, password):
                reason = 'new password' if edit else 'new user'
                log.debug('Updating password reason=>%s', reason)
                new_user.password = get_crypt_password(password) if password else None

            if force_password_change:
                new_user.update_userdata(force_password_change=True)
            if language:
                new_user.update_userdata(language=language)
            new_user.update_userdata(notification_status=True)

            self.sa.add(new_user)

            if not edit and create_repo_group:
                RepoGroupModel().create_personal_repo_group(
                    new_user, commit_early=False)

            if not edit:
                # add the RSS token
                AuthTokenModel().create(username,
                                        description=u'Generated feed token',
                                        role=AuthTokenModel.cls.ROLE_FEED)
                kwargs = new_user.get_dict()
                # backward compat, require api_keys present
                kwargs['api_keys'] = kwargs['auth_tokens']
                log_create_user(created_by=cur_user, **kwargs)
                events.trigger(events.UserPostCreate(user_data))
            return new_user
        except (DatabaseError,):
            log.error(traceback.format_exc())
            raise
391 392
    def create_registration(self, form_data):
        """
        Self-registration entry point: create a non-admin rhodecode-auth user
        from *form_data* and notify all admins about the registration.
        """
        from rhodecode.model.notification import NotificationModel
        from rhodecode.model.notification import EmailNotificationModel

        try:
            # self-registered users are never admins and always internal auth
            form_data['admin'] = False
            form_data['extern_name'] = 'rhodecode'
            form_data['extern_type'] = 'rhodecode'
            new_user = self.create(form_data)

            self.sa.add(new_user)
            self.sa.flush()

            user_data = new_user.get_dict()
            kwargs = {
                # use SQLALCHEMY safe dump of user data
                'user': AttributeDict(user_data),
                'date': datetime.datetime.now()
            }
            notification_type = EmailNotificationModel.TYPE_REGISTRATION
            # pre-generate the subject for notification itself
            (subject,
             _h, _e,  # we don't care about those
             body_plaintext) = EmailNotificationModel().render_email(
                notification_type, **kwargs)

            # create notification objects, and emails
            NotificationModel().create(
                created_by=new_user,
                notification_subject=subject,
                notification_body=body_plaintext,
                notification_type=notification_type,
                recipients=None,  # all admins
                email_kwargs=kwargs,
            )

            return new_user
        except Exception:
            log.error(traceback.format_exc())
            raise
432 433
433 434 def _handle_user_repos(self, username, repositories, handle_mode=None):
434 435 _superadmin = self.cls.get_first_super_admin()
435 436 left_overs = True
436 437
437 438 from rhodecode.model.repo import RepoModel
438 439
439 440 if handle_mode == 'detach':
440 441 for obj in repositories:
441 442 obj.user = _superadmin
442 443 # set description we know why we super admin now owns
443 444 # additional repositories that were orphaned !
444 445 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
445 446 self.sa.add(obj)
446 447 left_overs = False
447 448 elif handle_mode == 'delete':
448 449 for obj in repositories:
449 450 RepoModel().delete(obj, forks='detach')
450 451 left_overs = False
451 452
452 453 # if nothing is done we have left overs left
453 454 return left_overs
454 455
455 456 def _handle_user_repo_groups(self, username, repository_groups,
456 457 handle_mode=None):
457 458 _superadmin = self.cls.get_first_super_admin()
458 459 left_overs = True
459 460
460 461 from rhodecode.model.repo_group import RepoGroupModel
461 462
462 463 if handle_mode == 'detach':
463 464 for r in repository_groups:
464 465 r.user = _superadmin
465 466 # set description we know why we super admin now owns
466 467 # additional repositories that were orphaned !
467 468 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
468 469 self.sa.add(r)
469 470 left_overs = False
470 471 elif handle_mode == 'delete':
471 472 for r in repository_groups:
472 473 RepoGroupModel().delete(r)
473 474 left_overs = False
474 475
475 476 # if nothing is done we have left overs left
476 477 return left_overs
477 478
478 479 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
479 480 _superadmin = self.cls.get_first_super_admin()
480 481 left_overs = True
481 482
482 483 from rhodecode.model.user_group import UserGroupModel
483 484
484 485 if handle_mode == 'detach':
485 486 for r in user_groups:
486 487 for user_user_group_to_perm in r.user_user_group_to_perm:
487 488 if user_user_group_to_perm.user.username == username:
488 489 user_user_group_to_perm.user = _superadmin
489 490 r.user = _superadmin
490 491 # set description we know why we super admin now owns
491 492 # additional repositories that were orphaned !
492 493 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
493 494 self.sa.add(r)
494 495 left_overs = False
495 496 elif handle_mode == 'delete':
496 497 for r in user_groups:
497 498 UserGroupModel().delete(r)
498 499 left_overs = False
499 500
500 501 # if nothing is done we have left overs left
501 502 return left_overs
502 503
    def delete(self, user, cur_user=None, handle_repos=None,
               handle_repo_groups=None, handle_user_groups=None):
        """
        Delete a user, first dealing with objects they own.

        :param user: user instance, id or username to delete
        :param cur_user: username performing this action, for audit logging
        :param handle_repos: 'detach', 'delete' or None for owned repositories
        :param handle_repo_groups: same modes for owned repository groups
        :param handle_user_groups: same modes for owned user groups
        :raises DefaultUserException: when trying to remove the default user
        :raises UserOwnsReposException / UserOwnsRepoGroupsException /
            UserOwnsUserGroupsException: when owned objects were not handled
        """
        if not cur_user:
            cur_user = getattr(
                get_current_rhodecode_user(), 'username', None)
        user = self._get_user(user)

        try:
            if user.username == User.DEFAULT_USER:
                raise DefaultUserException(
                    u"You can't remove this user since it's"
                    u" crucial for entire application")

            left_overs = self._handle_user_repos(
                user.username, user.repositories, handle_repos)
            if left_overs and user.repositories:
                repos = [x.repo_name for x in user.repositories]
                raise UserOwnsReposException(
                    u'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
                    u'removed. Switch owners or remove those repositories:%(list_repos)s'
                    % {'username': user.username, 'len_repos': len(repos),
                       'list_repos': ', '.join(repos)})

            left_overs = self._handle_user_repo_groups(
                user.username, user.repository_groups, handle_repo_groups)
            if left_overs and user.repository_groups:
                repo_groups = [x.group_name for x in user.repository_groups]
                raise UserOwnsRepoGroupsException(
                    u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
                    u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
                    % {'username': user.username, 'len_repo_groups': len(repo_groups),
                       'list_repo_groups': ', '.join(repo_groups)})

            left_overs = self._handle_user_user_groups(
                user.username, user.user_groups, handle_user_groups)
            if left_overs and user.user_groups:
                user_groups = [x.users_group_name for x in user.user_groups]
                raise UserOwnsUserGroupsException(
                    u'user "%s" still owns %s user groups and cannot be '
                    u'removed. Switch owners or remove those user groups:%s'
                    % (user.username, len(user_groups), ', '.join(user_groups)))

            # we might change the user data with detach/delete, make sure
            # the object is marked as expired before actually deleting !
            self.sa.expire(user)
            self.sa.delete(user)
            from rhodecode.lib.hooks_base import log_delete_user
            log_delete_user(deleted_by=cur_user, **user.get_dict())
        except Exception:
            log.error(traceback.format_exc())
            raise
554 555
    def reset_password_link(self, data, pwd_reset_url):
        """
        Render and schedule the "password reset" email for the address
        in ``data['email']``.

        Returns ``True`` both when a user matched and when none did
        (only the log output differs) — presumably so callers cannot
        probe which addresses are registered; confirm intended.

        :param data: mapping that must contain the target ``email``
        :param pwd_reset_url: tokenized reset link embedded in the email
        :return: ``True`` on success or unknown address, ``False`` when
            rendering/scheduling raised an unexpected error
        """
        from rhodecode.lib.celerylib import tasks, run_task
        from rhodecode.model.notification import EmailNotificationModel
        user_email = data['email']
        try:
            user = User.get_by_email(user_email)
            if user:
                log.debug('password reset user found %s', user)

                email_kwargs = {
                    'password_reset_url': pwd_reset_url,
                    'user': user,
                    'email': user_email,
                    'date': datetime.datetime.now()
                }

                (subject, headers, email_body,
                 email_body_plaintext) = EmailNotificationModel().render_email(
                    EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)

                recipients = [user_email]

                action_logger_generic(
                    'sending password reset email to user: {}'.format(
                        user), namespace='security.password_reset')

                # actual delivery happens in a background (celery) task
                run_task(tasks.send_email, recipients, subject,
                         email_body_plaintext, email_body)

            else:
                log.debug("password reset email %s not found", user_email)
        except Exception:
            log.error(traceback.format_exc())
            return False

        return True
591 592
    def reset_password(self, data):
        """
        Set a freshly generated password for the user behind
        ``data['email']``, force a password change on next login,
        delete the reset token that authorized the operation, and
        schedule a confirmation email carrying the new password.

        Best-effort: any failure is logged; the DB session is rolled
        back only while still in the local-DB phase (see ``pre_db``) —
        once the email task was scheduled a rollback would not help.

        :param data: mapping with the target ``email`` and the reset
            ``token`` being consumed
        :return: always ``True``
        """
        from rhodecode.lib.celerylib import tasks, run_task
        from rhodecode.model.notification import EmailNotificationModel
        from rhodecode.lib import auth
        user_email = data['email']
        pre_db = True  # True while failures should still roll back the DB
        try:
            user = User.get_by_email(user_email)
            # 12-char password from upper+lower alphabet
            new_passwd = auth.PasswordGenerator().gen_password(
                12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
            if user:
                user.password = auth.get_crypt_password(new_passwd)
                # also force this user to reset his password !
                user.update_userdata(force_password_change=True)

                Session().add(user)

                # now delete the token in question
                UserApiKeys = AuthTokenModel.cls
                UserApiKeys().query().filter(
                    UserApiKeys.api_key == data['token']).delete()

                Session().commit()
                log.info('successfully reset password for `%s`', user_email)

                if new_passwd is None:
                    raise Exception('unable to generate new password')

                pre_db = False

                email_kwargs = {
                    'new_password': new_passwd,
                    'user': user,
                    'email': user_email,
                    'date': datetime.datetime.now()
                }

                (subject, headers, email_body,
                 email_body_plaintext) = EmailNotificationModel().render_email(
                    EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
                    **email_kwargs)

                recipients = [user_email]

                action_logger_generic(
                    'sent new password to user: {} with email: {}'.format(
                        user, user_email), namespace='security.password_reset')

                run_task(tasks.send_email, recipients, subject,
                         email_body_plaintext, email_body)

        except Exception:
            log.error('Failed to update user password')
            log.error(traceback.format_exc())
            if pre_db:
                # we rollback only if local db stuff fails. If it goes into
                # run_task, we're pass rollback state this wouldn't work then
                Session().rollback()

        return True
652 653
    def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
        """
        Fetches a user by user_id, api_key or username and fills
        ``auth_user`` attributes with those taken from the database.
        Additionally sets ``is_authenticated = False`` on the auth_user
        when the database lookup raised.

        :param auth_user: instance of user to set attributes on
        :param user_id: user id to fetch by
        :param api_key: api key to fetch by
        :param username: username to fetch by
        :return: ``True`` when data was filled, ``False`` otherwise
        :raises Exception: when none of the three lookup keys was given
        """
        if user_id is None and api_key is None and username is None:
            raise Exception('You need to pass user_id, api_key or username')

        log.debug(
            'AuthUser: fill data execution based on: '
            'user_id:%s api_key:%s username:%s', user_id, api_key, username)
        try:
            dbuser = None
            # lookup precedence: user_id first, then api_key, then username
            if user_id:
                dbuser = self.get(user_id)
            elif api_key:
                dbuser = self.get_by_auth_token(api_key)
            elif username:
                dbuser = self.get_by_username(username)

            if not dbuser:
                log.warning(
                    'Unable to lookup user by id:%s api_key:%s username:%s',
                    user_id, api_key, username)
                return False
            # inactive accounts are treated the same as missing ones
            if not dbuser.active:
                log.debug('User `%s:%s` is inactive, skipping fill data',
                          username, user_id)
                return False

            log.debug('AuthUser: filling found user:%s data', dbuser)
            user_data = dbuser.get_dict()

            user_data.update({
                # set explicit the safe escaped values
                'first_name': dbuser.first_name,
                'last_name': dbuser.last_name,
            })

            for k, v in user_data.items():
                # properties of auth user we dont update
                if k not in ['auth_tokens', 'permissions']:
                    setattr(auth_user, k, v)

        except Exception:
            log.error(traceback.format_exc())
            auth_user.is_authenticated = False
            return False

        return True
710 711
711 712 def has_perm(self, user, perm):
712 713 perm = self._get_perm(perm)
713 714 user = self._get_user(user)
714 715
715 716 return UserToPerm.query().filter(UserToPerm.user == user)\
716 717 .filter(UserToPerm.permission == perm).scalar() is not None
717 718
718 719 def grant_perm(self, user, perm):
719 720 """
720 721 Grant user global permissions
721 722
722 723 :param user:
723 724 :param perm:
724 725 """
725 726 user = self._get_user(user)
726 727 perm = self._get_perm(perm)
727 728 # if this permission is already granted skip it
728 729 _perm = UserToPerm.query()\
729 730 .filter(UserToPerm.user == user)\
730 731 .filter(UserToPerm.permission == perm)\
731 732 .scalar()
732 733 if _perm:
733 734 return
734 735 new = UserToPerm()
735 736 new.user = user
736 737 new.permission = perm
737 738 self.sa.add(new)
738 739 return new
739 740
740 741 def revoke_perm(self, user, perm):
741 742 """
742 743 Revoke users global permissions
743 744
744 745 :param user:
745 746 :param perm:
746 747 """
747 748 user = self._get_user(user)
748 749 perm = self._get_perm(perm)
749 750
750 751 obj = UserToPerm.query()\
751 752 .filter(UserToPerm.user == user)\
752 753 .filter(UserToPerm.permission == perm)\
753 754 .scalar()
754 755 if obj:
755 756 self.sa.delete(obj)
756 757
757 758 def add_extra_email(self, user, email):
758 759 """
759 760 Adds email address to UserEmailMap
760 761
761 762 :param user:
762 763 :param email:
763 764 """
764 765
765 766 user = self._get_user(user)
766 767
767 768 obj = UserEmailMap()
768 769 obj.user = user
769 770 obj.email = email
770 771 self.sa.add(obj)
771 772 return obj
772 773
773 774 def delete_extra_email(self, user, email_id):
774 775 """
775 776 Removes email address from UserEmailMap
776 777
777 778 :param user:
778 779 :param email_id:
779 780 """
780 781 user = self._get_user(user)
781 782 obj = UserEmailMap.query().get(email_id)
782 783 if obj and obj.user_id == user.user_id:
783 784 self.sa.delete(obj)
784 785
785 786 def parse_ip_range(self, ip_range):
786 787 ip_list = []
787 788
788 789 def make_unique(value):
789 790 seen = []
790 791 return [c for c in value if not (c in seen or seen.append(c))]
791 792
792 793 # firsts split by commas
793 794 for ip_range in ip_range.split(','):
794 795 if not ip_range:
795 796 continue
796 797 ip_range = ip_range.strip()
797 798 if '-' in ip_range:
798 799 start_ip, end_ip = ip_range.split('-', 1)
799 800 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
800 801 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
801 802 parsed_ip_range = []
802 803
803 804 for index in xrange(int(start_ip), int(end_ip) + 1):
804 805 new_ip = ipaddress.ip_address(index)
805 806 parsed_ip_range.append(str(new_ip))
806 807 ip_list.extend(parsed_ip_range)
807 808 else:
808 809 ip_list.append(ip_range)
809 810
810 811 return make_unique(ip_list)
811 812
812 813 def add_extra_ip(self, user, ip, description=None):
813 814 """
814 815 Adds ip address to UserIpMap
815 816
816 817 :param user:
817 818 :param ip:
818 819 """
819 820
820 821 user = self._get_user(user)
821 822 obj = UserIpMap()
822 823 obj.user = user
823 824 obj.ip_addr = ip
824 825 obj.description = description
825 826 self.sa.add(obj)
826 827 return obj
827 828
828 829 def delete_extra_ip(self, user, ip_id):
829 830 """
830 831 Removes ip address from UserIpMap
831 832
832 833 :param user:
833 834 :param ip_id:
834 835 """
835 836 user = self._get_user(user)
836 837 obj = UserIpMap.query().get(ip_id)
837 838 if obj and obj.user_id == user.user_id:
838 839 self.sa.delete(obj)
839 840
840 841 def get_accounts_in_creation_order(self, current_user=None):
841 842 """
842 843 Get accounts in order of creation for deactivation for license limits
843 844
844 845 pick currently logged in user, and append to the list in position 0
845 846 pick all super-admins in order of creation date and add it to the list
846 847 pick all other accounts in order of creation and add it to the list.
847 848
848 849 Based on that list, the last accounts can be disabled as they are
849 850 created at the end and don't include any of the super admins as well
850 851 as the current user.
851 852
852 853 :param current_user: optionally current user running this operation
853 854 """
854 855
855 856 if not current_user:
856 857 current_user = get_current_rhodecode_user()
857 858 active_super_admins = [
858 859 x.user_id for x in User.query()
859 860 .filter(User.user_id != current_user.user_id)
860 861 .filter(User.active == true())
861 862 .filter(User.admin == true())
862 863 .order_by(User.created_on.asc())]
863 864
864 865 active_regular_users = [
865 866 x.user_id for x in User.query()
866 867 .filter(User.user_id != current_user.user_id)
867 868 .filter(User.active == true())
868 869 .filter(User.admin == false())
869 870 .order_by(User.created_on.asc())]
870 871
871 872 list_of_accounts = [current_user.user_id]
872 873 list_of_accounts += active_super_admins
873 874 list_of_accounts += active_regular_users
874 875
875 876 return list_of_accounts
876 877
    def deactivate_last_users(self, expected_users, current_user=None):
        """
        Deactivate accounts that are over the license limits.
        Algorithm of which accounts to disable is based on the formula:

        Get current user, then super admins in creation order, then regular
        active users in creation order.

        Using that list we mark all accounts from the end of it as inactive.
        This way we block only latest created accounts.

        :param expected_users: list position cutoff; we deactivate
            the accounts past that point at the end of the ordered list
        :param current_user: optionally current user running this operation
        """

        list_of_accounts = self.get_accounts_in_creation_order(
            current_user=current_user)

        # NOTE(review): slicing at ``expected_users + 1`` leaves
        # ``expected_users + 1`` accounts active — presumably the current
        # user is counted on top of the expected amount; confirm intended.
        for acc_id in list_of_accounts[expected_users + 1:]:
            user = User.get(acc_id)
            log.info('Deactivating account %s for license unlock', user)
            user.active = False
            # commit per account so partial progress survives a failure
            Session().add(user)
            Session().commit()

        return
903 904
904 905 def get_user_log(self, user, filter_term):
905 906 user_log = UserLog.query()\
906 907 .filter(or_(UserLog.user_id == user.user_id,
907 908 UserLog.username == user.username))\
908 909 .options(joinedload(UserLog.user))\
909 910 .options(joinedload(UserLog.repository))\
910 911 .order_by(UserLog.action_date.desc())
911 912
912 913 user_log = user_log_filter(user_log, filter_term)
913 914 return user_log
@@ -1,34 +1,35 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import colander
22 22 from rhodecode.model.validation_schema import validators, preparers, types
23 23
24 24
class ReviewerSchema(colander.MappingSchema):
    """Validation schema for a single pull-request reviewer entry."""
    # reviewer identifier; accepts string or int (``StrOrIntType``)
    username = colander.SchemaNode(types.StrOrIntType())
    # free-form reasons why this reviewer was picked
    reasons = colander.SchemaNode(colander.List(), missing=['no reason specified'])
    # mandatory flag for this reviewer; defaults to optional
    mandatory = colander.SchemaNode(colander.Boolean(), missing=False)
    # review-rule data attached to this reviewer — presumably the rules
    # that selected them; confirm against callers
    rules = colander.SchemaNode(colander.List(), missing=[])
29 30
30 31
class ReviewerListSchema(colander.SequenceSchema):
    """Validation schema for a sequence of ``ReviewerSchema`` entries."""
    reviewers = ReviewerSchema()
33 34
34 35
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now