default-reviewers: introduce new voting rule logic that allows...
marcink - r2484:3775edd6 default

The requested changes are too big and content was truncated.

@@ -0,0 +1,38 b''
import logging

from sqlalchemy import *

from rhodecode.model import meta
from rhodecode.lib.dbmigrate.versions import _reset_base, notify

log = logging.getLogger(__name__)


def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_11_0_0 as db

    reviewers_table = db.PullRequestReviewers.__table__

    rule_data = Column(
        'rule_data_json',
        db.JsonType(dialect_map=dict(mysql=UnicodeText(16384))))
    rule_data.create(table=reviewers_table)

    # issue fixups
    fixups(db, meta.Session)


def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine


def fixups(models, _SESSION):
    pass
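This migration adds a JSON-typed `rule_data_json` column to the pull-request reviewers table. Below is a minimal sketch of how such a column might be written from application code; it assumes the `PullRequestReviewers` model exposes the new attribute and uses an illustrative payload, neither of which is spelled out in this diff.

# Illustrative sketch only (not part of this commit): general pattern for
# storing structured reviewer-rule data in the JSON column created above.
# The payload keys are assumptions about what the voting rule logic stores.
from rhodecode.model.db import PullRequestReviewers
from rhodecode.model.meta import Session

def attach_rule_data(reviewer_row_id, data):
    reviewer = Session().query(PullRequestReviewers).get(reviewer_row_id)
    reviewer.rule_data_json = data  # serialized/deserialized by db.JsonType
    Session().add(reviewer)
    Session().commit()

# e.g. attach_rule_data(1, {'vote_rule': -1, 'voters': ['user_a', 'user_b']})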
@@ -0,0 +1,37 b''
import logging

from sqlalchemy import *

from rhodecode.model import meta
from rhodecode.lib.dbmigrate.versions import _reset_base, notify

log = logging.getLogger(__name__)


def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata
    """
    _reset_base(migrate_engine)
    from rhodecode.lib.dbmigrate.schema import db_4_11_0_0 as db

    user_group_review_table = db.RepoReviewRuleUserGroup.__table__

    vote_rule = Column("vote_rule", Integer(), nullable=True,
                       default=-1)
    vote_rule.create(table=user_group_review_table)

    # issue fixups
    fixups(db, meta.Session)


def downgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine


def fixups(models, _SESSION):
    pass
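This second migration adds a nullable integer `vote_rule` to the user-group review rule table, defaulting to -1. The sketch below shows one plausible interpretation; reading -1 as "all group members must vote" is an assumption based only on the default value, not on anything stated in this diff.

# Sketch only: one possible reading of the vote_rule column added above.
# -1 (the migration default) is assumed to mean "everyone in the group
# must vote"; a positive N is assumed to mean "N votes are enough".
def required_votes(group_member_count, vote_rule):
    if vote_rule is None or vote_rule == -1:
        return group_member_count
    return min(vote_rule, group_member_count)

assert required_votes(5, -1) == 5
assert required_votes(5, 2) == 2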
NO CONTENT: new file 100644
NO CONTENT: new file 100644
NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated.
@@ -1,194 +1,196 b''
{
  "dirs": {
    "css": {
      "src":"rhodecode/public/css",
      "dest":"rhodecode/public/css"
    },
    "js": {
      "src": "rhodecode/public/js/src",
      "src_rc": "rhodecode/public/js/rhodecode",
      "dest": "rhodecode/public/js",
      "bower": "bower_components",
      "node_modules": "node_modules"
    }
  },
  "copy": {
    "main": {
      "expand": true,
      "cwd": "bower_components",
      "src": "webcomponentsjs/webcomponents-lite.js",
      "dest": "<%= dirs.js.dest %>/vendors"
    }
  },
  "concat": {
    "polymercss": {
      "src": [
        "<%= dirs.js.src %>/components/root-styles-prefix.html",
        "<%= dirs.css.src %>/style-polymer.css",
        "<%= dirs.js.src %>/components/root-styles-suffix.html"
      ],
      "dest": "<%= dirs.js.dest %>/src/components/root-styles.gen.html",
      "nonull": true
    },
    "dist": {
      "src": [
        "<%= dirs.js.node_modules %>/jquery/dist/jquery.min.js",
        "<%= dirs.js.node_modules %>/mousetrap/mousetrap.min.js",
        "<%= dirs.js.node_modules %>/moment/min/moment.min.js",
        "<%= dirs.js.node_modules %>/clipboard/dist/clipboard.min.js",
        "<%= dirs.js.node_modules %>/favico.js/favico-0.3.10.min.js",
        "<%= dirs.js.node_modules %>/appenlight-client/appenlight-client.min.js",
        "<%= dirs.js.src %>/logging.js",
        "<%= dirs.js.src %>/bootstrap.js",
        "<%= dirs.js.src %>/i18n_utils.js",
        "<%= dirs.js.src %>/deform.js",
+       "<%= dirs.js.src %>/ejs.js",
+       "<%= dirs.js.src %>/ejs_templates/utils.js",
        "<%= dirs.js.src %>/plugins/jquery.pjax.js",
        "<%= dirs.js.src %>/plugins/jquery.dataTables.js",
        "<%= dirs.js.src %>/plugins/flavoured_checkbox.js",
        "<%= dirs.js.src %>/plugins/jquery.auto-grow-input.js",
        "<%= dirs.js.src %>/plugins/jquery.autocomplete.js",
        "<%= dirs.js.src %>/plugins/jquery.debounce.js",
        "<%= dirs.js.src %>/plugins/jquery.mark.js",
        "<%= dirs.js.src %>/plugins/jquery.timeago.js",
        "<%= dirs.js.src %>/plugins/jquery.timeago-extension.js",
        "<%= dirs.js.src %>/select2/select2.js",
        "<%= dirs.js.src %>/codemirror/codemirror.js",
        "<%= dirs.js.src %>/codemirror/codemirror_loadmode.js",
        "<%= dirs.js.src %>/codemirror/codemirror_hint.js",
        "<%= dirs.js.src %>/codemirror/codemirror_overlay.js",
        "<%= dirs.js.src %>/codemirror/codemirror_placeholder.js",
        "<%= dirs.js.src %>/codemirror/codemirror_simplemode.js",
        "<%= dirs.js.dest %>/mode/meta.js",
        "<%= dirs.js.dest %>/mode/meta_ext.js",
        "<%= dirs.js.src_rc %>/i18n/select2/translations.js",
        "<%= dirs.js.src %>/rhodecode/utils/array.js",
        "<%= dirs.js.src %>/rhodecode/utils/string.js",
        "<%= dirs.js.src %>/rhodecode/utils/pyroutes.js",
        "<%= dirs.js.src %>/rhodecode/utils/ajax.js",
        "<%= dirs.js.src %>/rhodecode/utils/autocomplete.js",
        "<%= dirs.js.src %>/rhodecode/utils/colorgenerator.js",
        "<%= dirs.js.src %>/rhodecode/utils/ie.js",
        "<%= dirs.js.src %>/rhodecode/utils/os.js",
        "<%= dirs.js.src %>/rhodecode/utils/topics.js",
        "<%= dirs.js.src %>/rhodecode/init.js",
        "<%= dirs.js.src %>/rhodecode/changelog.js",
        "<%= dirs.js.src %>/rhodecode/codemirror.js",
        "<%= dirs.js.src %>/rhodecode/comments.js",
        "<%= dirs.js.src %>/rhodecode/constants.js",
        "<%= dirs.js.src %>/rhodecode/files.js",
        "<%= dirs.js.src %>/rhodecode/followers.js",
        "<%= dirs.js.src %>/rhodecode/menus.js",
        "<%= dirs.js.src %>/rhodecode/notifications.js",
        "<%= dirs.js.src %>/rhodecode/permissions.js",
        "<%= dirs.js.src %>/rhodecode/pjax.js",
        "<%= dirs.js.src %>/rhodecode/pullrequests.js",
        "<%= dirs.js.src %>/rhodecode/settings.js",
        "<%= dirs.js.src %>/rhodecode/select2_widgets.js",
        "<%= dirs.js.src %>/rhodecode/tooltips.js",
        "<%= dirs.js.src %>/rhodecode/users.js",
        "<%= dirs.js.src %>/rhodecode/appenlight.js",
        "<%= dirs.js.src %>/rhodecode.js"
      ],
      "dest": "<%= dirs.js.dest %>/scripts.js",
      "nonull": true
    }
  },
  "crisper": {
    "dist": {
      "options": {
        "cleanup": false,
        "onlySplit": true
      },
      "src": "<%= dirs.js.dest %>/rhodecode-components.html",
      "dest": "<%= dirs.js.dest %>/rhodecode-components.js"
    }
  },
  "less": {
    "development": {
      "options": {
        "compress": false,
        "yuicompress": false,
        "optimization": 0
      },
      "files": {
        "<%= dirs.css.dest %>/style.css": "<%= dirs.css.src %>/main.less",
        "<%= dirs.css.dest %>/style-polymer.css": "<%= dirs.css.src %>/polymer.less"
      }
    },
    "production": {
      "options": {
        "compress": true,
        "yuicompress": true,
        "optimization": 2
      },
      "files": {
        "<%= dirs.css.dest %>/style.css": "<%= dirs.css.src %>/main.less",
        "<%= dirs.css.dest %>/style-polymer.css": "<%= dirs.css.src %>/polymer.less"
      }
    },
    "components": {
      "files": [
        {
          "cwd": "<%= dirs.js.src %>/components/",
          "dest": "<%= dirs.js.src %>/components/",
          "src": [
            "**/*.less"
          ],
          "expand": true,
          "ext": ".css"
        }
      ]
    }
  },
  "watch": {
    "less": {
      "files": [
        "<%= dirs.css.src %>/**/*.less",
        "<%= dirs.js.src %>/components/**/*.less"
      ],
      "tasks": [
        "less:development",
        "less:components",
        "concat:polymercss",
        "vulcanize",
        "crisper",
        "concat:dist"
      ]
    },
    "js": {
      "files": [
        "!<%= dirs.js.src %>/components/root-styles.gen.html",
        "<%= dirs.js.src %>/**/*.js",
        "<%= dirs.js.src %>/components/**/*.html"
      ],
      "tasks": [
        "less:components",
        "concat:polymercss",
        "vulcanize",
        "crisper",
        "concat:dist"
      ]
    }
  },
  "jshint": {
    "rhodecode": {
      "src": "<%= dirs.js.src %>/rhodecode/**/*.js",
      "options": {
        "jshintrc": ".jshintrc"
      }
    }
  },
  "vulcanize": {
    "default": {
      "options": {
        "abspath": "",
        "inlineScripts": true,
        "inlineCss": true,
        "stripComments": true
      },
      "files": {
        "<%= dirs.js.dest %>/rhodecode-components.html": "<%= dirs.js.src %>/components/shared-components.html"
      }
    }
  }
}
@@ -1,63 +1,63 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""

RhodeCode, a web based repository management software
versioning implementation: http://www.python.org/dev/peps/pep-0386/
"""

import os
import sys
import platform

VERSION = tuple(open(os.path.join(
    os.path.dirname(__file__), 'VERSION')).read().split('.'))

BACKENDS = {
    'hg': 'Mercurial repository',
    'git': 'Git repository',
    'svn': 'Subversion repository',
}

CELERY_ENABLED = False
CELERY_EAGER = False

# link to config for pyramid
CONFIG = {}

# Populated with the settings dictionary from application init in
# rhodecode.conf.environment.load_pyramid_environment
PYRAMID_SETTINGS = {}

# Linked module for extensions
EXTENSIONS = {}

__version__ = ('.'.join((str(each) for each in VERSION[:3])))
-__dbversion__ = 83  # defines current db version for migrations
+__dbversion__ = 85  # defines current db version for migrations
__platform__ = platform.system()
__license__ = 'AGPLv3, and Commercial License'
__author__ = 'RhodeCode GmbH'
__url__ = 'https://code.rhodecode.com'

is_windows = __platform__ in ['Windows']
is_unix = not is_windows
is_test = False
disable_error_handler = False
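The `__dbversion__` bump from 83 to 85 accounts for the two new migration scripts above. As a rough illustration of the general pattern (not RhodeCode's actual upgrade code; the names below are made up), a migration runner compares the version the code expects with the version stored in the database and applies the missing steps in order:

# Illustrative pattern only: apply pending schema versions one by one
# until the database reaches the version the code expects.
CODE_DB_VERSION = 85  # mirrors rhodecode.__dbversion__ after this commit

def upgrade_db(current_db_version, apply_version):
    """apply_version is a callable that runs one numbered migration script."""
    for version in range(current_db_version + 1, CODE_DB_VERSION + 1):
        apply_version(version)
    return CODE_DB_VERSION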
@@ -1,142 +1,142 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


import pytest
import urlobject

from rhodecode.api.tests.utils import (
    build_data, api_call, assert_error, assert_ok)
from rhodecode.lib import helpers as h
from rhodecode.lib.utils2 import safe_unicode

pytestmark = pytest.mark.backends("git", "hg")


@pytest.mark.usefixtures("testuser_api", "app")
class TestGetPullRequest(object):

    def test_api_get_pull_request(self, pr_util, http_host_only_stub):
        from rhodecode.model.pull_request import PullRequestModel
        pull_request = pr_util.create_pull_request(mergeable=True)
        id_, params = build_data(
            self.apikey, 'get_pull_request',
            pullrequestid=pull_request.pull_request_id)

        response = api_call(self.app, params)

        assert response.status == '200 OK'

        url_obj = urlobject.URLObject(
            h.route_url(
                'pullrequest_show',
                repo_name=pull_request.target_repo.repo_name,
                pull_request_id=pull_request.pull_request_id))

        pr_url = safe_unicode(
            url_obj.with_netloc(http_host_only_stub))
        source_url = safe_unicode(
            pull_request.source_repo.clone_url().with_netloc(http_host_only_stub))
        target_url = safe_unicode(
            pull_request.target_repo.clone_url().with_netloc(http_host_only_stub))
        shadow_url = safe_unicode(
            PullRequestModel().get_shadow_clone_url(pull_request))

        expected = {
            'pull_request_id': pull_request.pull_request_id,
            'url': pr_url,
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': {
                'status': True,
                'message': 'This pull request can be automatically merged.',
            },
            'source': {
                'clone_url': source_url,
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': target_url,
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': {
                'clone_url': shadow_url,
                'reference': {
                    'name': pull_request.shadow_merge_ref.name,
                    'type': pull_request.shadow_merge_ref.type,
                    'commit_id': pull_request.shadow_merge_ref.commit_id,
                },
            },
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
-               for reviewer, reasons, mandatory, st in
+               for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }
        assert_ok(id_, expected, response.body)

    def test_api_get_pull_request_repo_error(self, pr_util):
        pull_request = pr_util.create_pull_request()
        id_, params = build_data(
            self.apikey, 'get_pull_request',
            repoid=666, pullrequestid=pull_request.pull_request_id)
        response = api_call(self.app, params)

        expected = 'repository `666` does not exist'
        assert_error(id_, expected, given=response.body)

    def test_api_get_pull_request_pull_request_error(self):
        id_, params = build_data(
            self.apikey, 'get_pull_request', pullrequestid=666)
        response = api_call(self.app, params)

        expected = 'pull request `666` does not exist'
        assert_error(id_, expected, given=response.body)

    def test_api_get_pull_request_pull_request_error_just_pr_id(self):
        id_, params = build_data(
            self.apikey, 'get_pull_request',
            pullrequestid=666)
        response = api_call(self.app, params)

        expected = 'pull request `666` does not exist'
        assert_error(id_, expected, given=response.body)
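The only change in this test is the unpacking of `pull_request.reviewers_statuses()`, which now yields a five-element tuple with a leading `obj` entry in front of the user, reasons, mandatory flag and statuses. Below is a hedged sketch of consuming the new shape; treating `obj` as the reviewers row that carries the new rule data is an inference from this commit, not something the test states.

# Sketch: building the API 'reviewers' payload from the new 5-tuple shape.
# `obj` is assumed to be the PullRequestReviewers row (which now also holds
# rule_data_json); only the remaining elements feed the response here.
def reviewers_payload(pull_request):
    result = []
    for obj, reviewer, reasons, mandatory, statuses in \
            pull_request.reviewers_statuses():
        result.append({
            'user': reviewer.get_api_data(include_secrets=False,
                                          details='basic'),
            'reasons': reasons,
            'review_status': (
                statuses[0][1].status if statuses else 'not_reviewed'),
        })
    return result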
@@ -1,213 +1,213 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import pytest

from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.model.db import User
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.tests import TEST_USER_ADMIN_LOGIN
from rhodecode.api.tests.utils import (
    build_data, api_call, assert_ok, assert_error)


@pytest.mark.usefixtures("testuser_api", "app")
class TestUpdatePullRequest(object):

    @pytest.mark.backends("git", "hg")
    def test_api_update_pull_request_title_or_description(
            self, pr_util, no_notifications):
        pull_request = pr_util.create_pull_request()

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id,
            title='New TITLE OF A PR',
            description='New DESC OF A PR',
        )
        response = api_call(self.app, params)

        expected = {
            "msg": "Updated pull request `{}`".format(
                pull_request.pull_request_id),
            "pull_request": response.json['result']['pull_request'],
            "updated_commits": {"added": [], "common": [], "removed": []},
            "updated_reviewers": {"added": [], "removed": []},
        }

        response_json = response.json['result']
        assert response_json == expected
        pr = response_json['pull_request']
        assert pr['title'] == 'New TITLE OF A PR'
        assert pr['description'] == 'New DESC OF A PR'

    @pytest.mark.backends("git", "hg")
    def test_api_try_update_closed_pull_request(
            self, pr_util, no_notifications):
        pull_request = pr_util.create_pull_request()
        PullRequestModel().close_pull_request(
            pull_request, TEST_USER_ADMIN_LOGIN)

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id)
        response = api_call(self.app, params)

        expected = 'pull request `{}` update failed, pull request ' \
                   'is closed'.format(pull_request.pull_request_id)

        assert_error(id_, expected, response.body)

    @pytest.mark.backends("git", "hg")
    def test_api_update_update_commits(self, pr_util, no_notifications):
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.update_source_repository(head='c')
        repo = pull_request.source_repo.scm_instance()
        commits = [x for x in repo.get_commits()]
        print commits

        added_commit_id = commits[-1].raw_id  # c commit
        common_commit_id = commits[1].raw_id  # b commit is common ancestor
        total_commits = [added_commit_id, common_commit_id]

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id,
            update_commits=True
        )
        response = api_call(self.app, params)

        expected = {
            "msg": "Updated pull request `{}`".format(
                pull_request.pull_request_id),
            "pull_request": response.json['result']['pull_request'],
            "updated_commits": {"added": [added_commit_id],
                                "common": [common_commit_id],
                                "total": total_commits,
                                "removed": []},
            "updated_reviewers": {"added": [], "removed": []},
        }

        assert_ok(id_, expected, response.body)

    @pytest.mark.backends("git", "hg")
    def test_api_update_change_reviewers(
            self, user_util, pr_util, no_notifications):
        a = user_util.create_user()
        b = user_util.create_user()
        c = user_util.create_user()
        new_reviewers = [
            {'username': b.username, 'reasons': ['updated via API'],
             'mandatory': False},
            {'username': c.username, 'reasons': ['updated via API'],
             'mandatory': False},
        ]

        added = [b.username, c.username]
        removed = [a.username]

        pull_request = pr_util.create_pull_request(
-           reviewers=[(a.username, ['added via API'], False)])
+           reviewers=[(a.username, ['added via API'], False, [])])

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id,
            reviewers=new_reviewers)
        response = api_call(self.app, params)
        expected = {
            "msg": "Updated pull request `{}`".format(
                pull_request.pull_request_id),
            "pull_request": response.json['result']['pull_request'],
            "updated_commits": {"added": [], "common": [], "removed": []},
            "updated_reviewers": {"added": added, "removed": removed},
        }

        assert_ok(id_, expected, response.body)

    @pytest.mark.backends("git", "hg")
    def test_api_update_bad_user_in_reviewers(self, pr_util):
        pull_request = pr_util.create_pull_request()

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id,
            reviewers=[{'username': 'bad_name'}])
        response = api_call(self.app, params)

        expected = 'user `bad_name` does not exist'

        assert_error(id_, expected, response.body)

    @pytest.mark.backends("git", "hg")
    def test_api_update_repo_error(self, pr_util):
        pull_request = pr_util.create_pull_request()
        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid='fake',
            pullrequestid=pull_request.pull_request_id,
            reviewers=[{'username': 'bad_name'}])
        response = api_call(self.app, params)

        expected = 'repository `fake` does not exist'

        response_json = response.json['error']
        assert response_json == expected

    @pytest.mark.backends("git", "hg")
    def test_api_update_pull_request_error(self, pr_util):
        pull_request = pr_util.create_pull_request()

        id_, params = build_data(
            self.apikey, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=999999,
            reviewers=[{'username': 'bad_name'}])
        response = api_call(self.app, params)

        expected = 'pull request `999999` does not exist'
        assert_error(id_, expected, response.body)

    @pytest.mark.backends("git", "hg")
    def test_api_update_pull_request_no_perms_to_update(
            self, user_util, pr_util):
        user = user_util.create_user()
        pull_request = pr_util.create_pull_request()

        id_, params = build_data(
            user.api_key, 'update_pull_request',
            repoid=pull_request.target_repo.repo_name,
            pullrequestid=pull_request.pull_request_id,)
        response = api_call(self.app, params)

        expected = ('pull request `%s` update failed, '
                    'no permission to update.') % pull_request.pull_request_id

        assert_error(id_, expected, response.body)
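Here the fixture's reviewer definition grows from `(username, reasons, mandatory)` to `(username, reasons, mandatory, [])`. Below is a small sketch of the assumed four-element shape; calling the fourth element `rules` is an assumption suggested by the commit message, not confirmed anywhere in this diff.

# Assumed reviewer-definition shape after this change:
#   (username, reasons, mandatory, rules)
# `rules` presumably carries the per-reviewer voting-rule data this commit
# introduces; the empty lists mirror the updated fixture call above.
reviewers = [
    ('reviewer_a', ['added via API'], False, []),
    ('reviewer_b', ['repo review rule'], True, []),
]

usernames = [username for username, reasons, mandatory, rules in reviewers]
assert usernames == ['reviewer_a', 'reviewer_b']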
@@ -1,248 +1,247 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 import formencode
23 import formencode
24 import formencode.htmlfill
24 import formencode.htmlfill
25
25
26 from pyramid.httpexceptions import HTTPFound
26 from pyramid.httpexceptions import HTTPFound
27 from pyramid.view import view_config
27 from pyramid.view import view_config
28 from pyramid.response import Response
28 from pyramid.response import Response
29 from pyramid.renderers import render
29 from pyramid.renderers import render
30
30
31 from rhodecode.apps._base import BaseAppView, DataGridAppView
31 from rhodecode.apps._base import BaseAppView, DataGridAppView
32 from rhodecode.lib.auth import (
32 from rhodecode.lib.auth import (
33 LoginRequired, NotAnonymous, CSRFRequired, HasPermissionAnyDecorator)
33 LoginRequired, NotAnonymous, CSRFRequired, HasPermissionAnyDecorator)
34 from rhodecode.lib import helpers as h, audit_logger
34 from rhodecode.lib import helpers as h, audit_logger
35 from rhodecode.lib.utils2 import safe_unicode
35 from rhodecode.lib.utils2 import safe_unicode
36
36
37 from rhodecode.model.forms import UserGroupForm
37 from rhodecode.model.forms import UserGroupForm
38 from rhodecode.model.permission import PermissionModel
38 from rhodecode.model.permission import PermissionModel
39 from rhodecode.model.scm import UserGroupList
39 from rhodecode.model.scm import UserGroupList
40 from rhodecode.model.db import (
40 from rhodecode.model.db import (
41 or_, count, User, UserGroup, UserGroupMember)
41 or_, count, User, UserGroup, UserGroupMember)
42 from rhodecode.model.meta import Session
42 from rhodecode.model.meta import Session
43 from rhodecode.model.user_group import UserGroupModel
43 from rhodecode.model.user_group import UserGroupModel
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 class AdminUserGroupsView(BaseAppView, DataGridAppView):
48 class AdminUserGroupsView(BaseAppView, DataGridAppView):
49
49
50 def load_default_context(self):
50 def load_default_context(self):
51 c = self._get_local_tmpl_context()
51 c = self._get_local_tmpl_context()
52
52
53 PermissionModel().set_global_permission_choices(
53 PermissionModel().set_global_permission_choices(
54 c, gettext_translator=self.request.translate)
54 c, gettext_translator=self.request.translate)
55
55
56
57 return c
56 return c
58
57
59 # permission check in data loading of
58 # permission check in data loading of
60 # `user_groups_list_data` via UserGroupList
59 # `user_groups_list_data` via UserGroupList
61 @LoginRequired()
60 @LoginRequired()
62 @NotAnonymous()
61 @NotAnonymous()
63 @view_config(
62 @view_config(
64 route_name='user_groups', request_method='GET',
63 route_name='user_groups', request_method='GET',
65 renderer='rhodecode:templates/admin/user_groups/user_groups.mako')
64 renderer='rhodecode:templates/admin/user_groups/user_groups.mako')
66 def user_groups_list(self):
65 def user_groups_list(self):
67 c = self.load_default_context()
66 c = self.load_default_context()
68 return self._get_template_context(c)
67 return self._get_template_context(c)
69
68
70 # permission check inside
69 # permission check inside
71 @LoginRequired()
70 @LoginRequired()
72 @NotAnonymous()
71 @NotAnonymous()
73 @view_config(
72 @view_config(
74 route_name='user_groups_data', request_method='GET',
73 route_name='user_groups_data', request_method='GET',
75 renderer='json_ext', xhr=True)
74 renderer='json_ext', xhr=True)
76 def user_groups_list_data(self):
75 def user_groups_list_data(self):
77 self.load_default_context()
76 self.load_default_context()
78 column_map = {
77 column_map = {
79 'active': 'users_group_active',
78 'active': 'users_group_active',
80 'description': 'user_group_description',
79 'description': 'user_group_description',
81 'members': 'members_total',
80 'members': 'members_total',
82 'owner': 'user_username',
81 'owner': 'user_username',
83 'sync': 'group_data'
82 'sync': 'group_data'
84 }
83 }
85 draw, start, limit = self._extract_chunk(self.request)
84 draw, start, limit = self._extract_chunk(self.request)
86 search_q, order_by, order_dir = self._extract_ordering(
85 search_q, order_by, order_dir = self._extract_ordering(
87 self.request, column_map=column_map)
86 self.request, column_map=column_map)
88
87
89 _render = self.request.get_partial_renderer(
88 _render = self.request.get_partial_renderer(
90 'rhodecode:templates/data_table/_dt_elements.mako')
89 'rhodecode:templates/data_table/_dt_elements.mako')
91
90
92 def user_group_name(user_group_id, user_group_name):
91 def user_group_name(user_group_id, user_group_name):
93 return _render("user_group_name", user_group_id, user_group_name)
92 return _render("user_group_name", user_group_id, user_group_name)
94
93
95 def user_group_actions(user_group_id, user_group_name):
94 def user_group_actions(user_group_id, user_group_name):
96 return _render("user_group_actions", user_group_id, user_group_name)
95 return _render("user_group_actions", user_group_id, user_group_name)
97
96
98 def user_profile(username):
97 def user_profile(username):
99 return _render('user_profile', username)
98 return _render('user_profile', username)
100
99
101 auth_user_group_list = UserGroupList(
100 auth_user_group_list = UserGroupList(
102 UserGroup.query().all(), perm_set=['usergroup.admin'])
101 UserGroup.query().all(), perm_set=['usergroup.admin'])
103
102
104 allowed_ids = [-1]
103 allowed_ids = [-1]
105 for user_group in auth_user_group_list:
104 for user_group in auth_user_group_list:
106 allowed_ids.append(user_group.users_group_id)
105 allowed_ids.append(user_group.users_group_id)
107
106
108 user_groups_data_total_count = UserGroup.query()\
107 user_groups_data_total_count = UserGroup.query()\
109 .filter(UserGroup.users_group_id.in_(allowed_ids))\
108 .filter(UserGroup.users_group_id.in_(allowed_ids))\
110 .count()
109 .count()
111
110
112 member_count = count(UserGroupMember.user_id)
111 member_count = count(UserGroupMember.user_id)
113 base_q = Session.query(
112 base_q = Session.query(
114 UserGroup.users_group_name,
113 UserGroup.users_group_name,
115 UserGroup.user_group_description,
114 UserGroup.user_group_description,
116 UserGroup.users_group_active,
115 UserGroup.users_group_active,
117 UserGroup.users_group_id,
116 UserGroup.users_group_id,
118 UserGroup.group_data,
117 UserGroup.group_data,
119 User,
118 User,
120 member_count.label('member_count')
119 member_count.label('member_count')
121 ) \
120 ) \
122 .filter(UserGroup.users_group_id.in_(allowed_ids)) \
121 .filter(UserGroup.users_group_id.in_(allowed_ids)) \
123 .outerjoin(UserGroupMember) \
122 .outerjoin(UserGroupMember) \
124 .join(User, User.user_id == UserGroup.user_id) \
123 .join(User, User.user_id == UserGroup.user_id) \
125 .group_by(UserGroup, User)
124 .group_by(UserGroup, User)
126
125
127 if search_q:
126 if search_q:
128 like_expression = u'%{}%'.format(safe_unicode(search_q))
127 like_expression = u'%{}%'.format(safe_unicode(search_q))
129 base_q = base_q.filter(or_(
128 base_q = base_q.filter(or_(
130 UserGroup.users_group_name.ilike(like_expression),
129 UserGroup.users_group_name.ilike(like_expression),
131 ))
130 ))
132
131
133 user_groups_data_total_filtered_count = base_q.count()
132 user_groups_data_total_filtered_count = base_q.count()
134
133
135 if order_by == 'members_total':
134 if order_by == 'members_total':
136 sort_col = member_count
135 sort_col = member_count
137 elif order_by == 'user_username':
136 elif order_by == 'user_username':
138 sort_col = User.username
137 sort_col = User.username
139 else:
138 else:
140 sort_col = getattr(UserGroup, order_by, None)
139 sort_col = getattr(UserGroup, order_by, None)
141
140
142 if isinstance(sort_col, count) or sort_col:
141 if isinstance(sort_col, count) or sort_col:
143 if order_dir == 'asc':
142 if order_dir == 'asc':
144 sort_col = sort_col.asc()
143 sort_col = sort_col.asc()
145 else:
144 else:
146 sort_col = sort_col.desc()
145 sort_col = sort_col.desc()
147
146
148 base_q = base_q.order_by(sort_col)
147 base_q = base_q.order_by(sort_col)
149 base_q = base_q.offset(start).limit(limit)
148 base_q = base_q.offset(start).limit(limit)
150
149
151 # authenticated access to user groups
150 # authenticated access to user groups
152 auth_user_group_list = base_q.all()
151 auth_user_group_list = base_q.all()
153
152
154 user_groups_data = []
153 user_groups_data = []
155 for user_gr in auth_user_group_list:
154 for user_gr in auth_user_group_list:
156 user_groups_data.append({
155 user_groups_data.append({
157 "users_group_name": user_group_name(
156 "users_group_name": user_group_name(
158 user_gr.users_group_id, h.escape(user_gr.users_group_name)),
157 user_gr.users_group_id, h.escape(user_gr.users_group_name)),
159 "name_raw": h.escape(user_gr.users_group_name),
158 "name_raw": h.escape(user_gr.users_group_name),
160 "description": h.escape(user_gr.user_group_description),
159 "description": h.escape(user_gr.user_group_description),
161 "members": user_gr.member_count,
160 "members": user_gr.member_count,
162 # NOTE(marcink): because of advanced query we
161 # NOTE(marcink): because of advanced query we
163 # need to load it like that
162 # need to load it like that
164 "sync": UserGroup._load_group_data(
163 "sync": UserGroup._load_group_data(
165 user_gr.group_data).get('extern_type'),
164 user_gr.group_data).get('extern_type'),
166 "active": h.bool2icon(user_gr.users_group_active),
165 "active": h.bool2icon(user_gr.users_group_active),
167 "owner": user_profile(user_gr.User.username),
166 "owner": user_profile(user_gr.User.username),
168 "action": user_group_actions(
167 "action": user_group_actions(
169 user_gr.users_group_id, user_gr.users_group_name)
168 user_gr.users_group_id, user_gr.users_group_name)
170 })
169 })
171
170
172 data = ({
171 data = ({
173 'draw': draw,
172 'draw': draw,
174 'data': user_groups_data,
173 'data': user_groups_data,
175 'recordsTotal': user_groups_data_total_count,
174 'recordsTotal': user_groups_data_total_count,
176 'recordsFiltered': user_groups_data_total_filtered_count,
175 'recordsFiltered': user_groups_data_total_filtered_count,
177 })
176 })
178
177
179 return data
178 return data
180
179
181 @LoginRequired()
180 @LoginRequired()
182 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
181 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
183 @view_config(
182 @view_config(
184 route_name='user_groups_new', request_method='GET',
183 route_name='user_groups_new', request_method='GET',
185 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
184 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
186 def user_groups_new(self):
185 def user_groups_new(self):
187 c = self.load_default_context()
186 c = self.load_default_context()
188 return self._get_template_context(c)
187 return self._get_template_context(c)
189
188
190 @LoginRequired()
189 @LoginRequired()
191 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
190 @HasPermissionAnyDecorator('hg.admin', 'hg.usergroup.create.true')
192 @CSRFRequired()
191 @CSRFRequired()
193 @view_config(
192 @view_config(
194 route_name='user_groups_create', request_method='POST',
193 route_name='user_groups_create', request_method='POST',
195 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
194 renderer='rhodecode:templates/admin/user_groups/user_group_add.mako')
196 def user_groups_create(self):
195 def user_groups_create(self):
197 _ = self.request.translate
196 _ = self.request.translate
198 c = self.load_default_context()
197 c = self.load_default_context()
199 users_group_form = UserGroupForm(self.request.translate)()
198 users_group_form = UserGroupForm(self.request.translate)()
200
199
201 user_group_name = self.request.POST.get('users_group_name')
200 user_group_name = self.request.POST.get('users_group_name')
202 try:
201 try:
203 form_result = users_group_form.to_python(dict(self.request.POST))
202 form_result = users_group_form.to_python(dict(self.request.POST))
204 user_group = UserGroupModel().create(
203 user_group = UserGroupModel().create(
205 name=form_result['users_group_name'],
204 name=form_result['users_group_name'],
206 description=form_result['user_group_description'],
205 description=form_result['user_group_description'],
207 owner=self._rhodecode_user.user_id,
206 owner=self._rhodecode_user.user_id,
208 active=form_result['users_group_active'])
207 active=form_result['users_group_active'])
209 Session().flush()
208 Session().flush()
210 creation_data = user_group.get_api_data()
209 creation_data = user_group.get_api_data()
211 user_group_name = form_result['users_group_name']
210 user_group_name = form_result['users_group_name']
212
211
213 audit_logger.store_web(
212 audit_logger.store_web(
214 'user_group.create', action_data={'data': creation_data},
213 'user_group.create', action_data={'data': creation_data},
215 user=self._rhodecode_user)
214 user=self._rhodecode_user)
216
215
217 user_group_link = h.link_to(
216 user_group_link = h.link_to(
218 h.escape(user_group_name),
217 h.escape(user_group_name),
219 h.route_path(
218 h.route_path(
220 'edit_user_group', user_group_id=user_group.users_group_id))
219 'edit_user_group', user_group_id=user_group.users_group_id))
221 h.flash(h.literal(_('Created user group %(user_group_link)s')
220 h.flash(h.literal(_('Created user group %(user_group_link)s')
222 % {'user_group_link': user_group_link}),
221 % {'user_group_link': user_group_link}),
223 category='success')
222 category='success')
224 Session().commit()
223 Session().commit()
225 user_group_id = user_group.users_group_id
224 user_group_id = user_group.users_group_id
226 except formencode.Invalid as errors:
225 except formencode.Invalid as errors:
227
226
228 data = render(
227 data = render(
229 'rhodecode:templates/admin/user_groups/user_group_add.mako',
228 'rhodecode:templates/admin/user_groups/user_group_add.mako',
230 self._get_template_context(c), self.request)
229 self._get_template_context(c), self.request)
231 html = formencode.htmlfill.render(
230 html = formencode.htmlfill.render(
232 data,
231 data,
233 defaults=errors.value,
232 defaults=errors.value,
234 errors=errors.error_dict or {},
233 errors=errors.error_dict or {},
235 prefix_error=False,
234 prefix_error=False,
236 encoding="UTF-8",
235 encoding="UTF-8",
237 force_defaults=False
236 force_defaults=False
238 )
237 )
239 return Response(html)
238 return Response(html)
240
239
241 except Exception:
240 except Exception:
242 log.exception("Exception creating user group")
241 log.exception("Exception creating user group")
243 h.flash(_('Error occurred during creation of user group %s') \
242 h.flash(_('Error occurred during creation of user group %s') \
244 % user_group_name, category='error')
243 % user_group_name, category='error')
245 raise HTTPFound(h.route_path('user_groups_new'))
244 raise HTTPFound(h.route_path('user_groups_new'))
246
245
247 raise HTTPFound(
246 raise HTTPFound(
248 h.route_path('edit_user_group', user_group_id=user_group_id))
247 h.route_path('edit_user_group', user_group_id=user_group_id))
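
The test changes below exercise the new voting-rule data for default reviewers: PullRequestModel().update_reviewers() now receives reviewer entries as 4-tuples of (user_id, reasons, mandatory, rules) instead of the previous 3-tuples, and the pull request create form gains an empty 'rules:sequence' block. A minimal sketch of the new call shape, mirroring the tests below; the reviewer id and reason string are illustrative, and the empty rules list assumes no voting rule applies to that reviewer:

    # hypothetical example of the updated reviewer tuple: (user_id, reasons, mandatory, rules)
    PullRequestModel().update_reviewers(
        pull_request_id,
        [(reviewer_user_id, ['added as default reviewer'], False, [])],
        pull_request.author)
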
@@ -1,1134 +1,1140 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 import rhodecode
23 import rhodecode
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
24 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
25 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.lib.vcs.nodes import FileNode
26 from rhodecode.lib import helpers as h
26 from rhodecode.lib import helpers as h
27 from rhodecode.model.changeset_status import ChangesetStatusModel
27 from rhodecode.model.changeset_status import ChangesetStatusModel
28 from rhodecode.model.db import (
28 from rhodecode.model.db import (
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
29 PullRequest, ChangesetStatus, UserLog, Notification, ChangesetComment)
30 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
31 from rhodecode.model.pull_request import PullRequestModel
31 from rhodecode.model.pull_request import PullRequestModel
32 from rhodecode.model.user import UserModel
32 from rhodecode.model.user import UserModel
33 from rhodecode.tests import (
33 from rhodecode.tests import (
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
34 assert_session_flash, TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN)
35 from rhodecode.tests.utils import AssertResponse
35 from rhodecode.tests.utils import AssertResponse
36
36
37
37
38 def route_path(name, params=None, **kwargs):
38 def route_path(name, params=None, **kwargs):
39 import urllib
39 import urllib
40
40
41 base_url = {
41 base_url = {
42 'repo_changelog': '/{repo_name}/changelog',
42 'repo_changelog': '/{repo_name}/changelog',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
43 'repo_changelog_file': '/{repo_name}/changelog/{commit_id}/{f_path}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
44 'pullrequest_show': '/{repo_name}/pull-request/{pull_request_id}',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
45 'pullrequest_show_all': '/{repo_name}/pull-request',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
46 'pullrequest_show_all_data': '/{repo_name}/pull-request-data',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
47 'pullrequest_repo_refs': '/{repo_name}/pull-request/refs/{target_repo_name:.*?[^/]}',
48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
48 'pullrequest_repo_destinations': '/{repo_name}/pull-request/repo-destinations',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
49 'pullrequest_new': '/{repo_name}/pull-request/new',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
50 'pullrequest_create': '/{repo_name}/pull-request/create',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
51 'pullrequest_update': '/{repo_name}/pull-request/{pull_request_id}/update',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
52 'pullrequest_merge': '/{repo_name}/pull-request/{pull_request_id}/merge',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
53 'pullrequest_delete': '/{repo_name}/pull-request/{pull_request_id}/delete',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
54 'pullrequest_comment_create': '/{repo_name}/pull-request/{pull_request_id}/comment',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
55 'pullrequest_comment_delete': '/{repo_name}/pull-request/{pull_request_id}/comment/{comment_id}/delete',
56 }[name].format(**kwargs)
56 }[name].format(**kwargs)
57
57
58 if params:
58 if params:
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
59 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
60 return base_url
60 return base_url
61
61
62
62
63 @pytest.mark.usefixtures('app', 'autologin_user')
63 @pytest.mark.usefixtures('app', 'autologin_user')
64 @pytest.mark.backends("git", "hg")
64 @pytest.mark.backends("git", "hg")
65 class TestPullrequestsView(object):
65 class TestPullrequestsView(object):
66
66
67 def test_index(self, backend):
67 def test_index(self, backend):
68 self.app.get(route_path(
68 self.app.get(route_path(
69 'pullrequest_new',
69 'pullrequest_new',
70 repo_name=backend.repo_name))
70 repo_name=backend.repo_name))
71
71
72 def test_option_menu_create_pull_request_exists(self, backend):
72 def test_option_menu_create_pull_request_exists(self, backend):
73 repo_name = backend.repo_name
73 repo_name = backend.repo_name
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
74 response = self.app.get(h.route_path('repo_summary', repo_name=repo_name))
75
75
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
76 create_pr_link = '<a href="%s">Create Pull Request</a>' % route_path(
77 'pullrequest_new', repo_name=repo_name)
77 'pullrequest_new', repo_name=repo_name)
78 response.mustcontain(create_pr_link)
78 response.mustcontain(create_pr_link)
79
79
80 def test_create_pr_form_with_raw_commit_id(self, backend):
80 def test_create_pr_form_with_raw_commit_id(self, backend):
81 repo = backend.repo
81 repo = backend.repo
82
82
83 self.app.get(
83 self.app.get(
84 route_path('pullrequest_new',
84 route_path('pullrequest_new',
85 repo_name=repo.repo_name,
85 repo_name=repo.repo_name,
86 commit=repo.get_commit().raw_id),
86 commit=repo.get_commit().raw_id),
87 status=200)
87 status=200)
88
88
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
89 @pytest.mark.parametrize('pr_merge_enabled', [True, False])
90 def test_show(self, pr_util, pr_merge_enabled):
90 def test_show(self, pr_util, pr_merge_enabled):
91 pull_request = pr_util.create_pull_request(
91 pull_request = pr_util.create_pull_request(
92 mergeable=pr_merge_enabled, enable_notifications=False)
92 mergeable=pr_merge_enabled, enable_notifications=False)
93
93
94 response = self.app.get(route_path(
94 response = self.app.get(route_path(
95 'pullrequest_show',
95 'pullrequest_show',
96 repo_name=pull_request.target_repo.scm_instance().name,
96 repo_name=pull_request.target_repo.scm_instance().name,
97 pull_request_id=pull_request.pull_request_id))
97 pull_request_id=pull_request.pull_request_id))
98
98
99 for commit_id in pull_request.revisions:
99 for commit_id in pull_request.revisions:
100 response.mustcontain(commit_id)
100 response.mustcontain(commit_id)
101
101
102 assert pull_request.target_ref_parts.type in response
102 assert pull_request.target_ref_parts.type in response
103 assert pull_request.target_ref_parts.name in response
103 assert pull_request.target_ref_parts.name in response
104 target_clone_url = pull_request.target_repo.clone_url()
104 target_clone_url = pull_request.target_repo.clone_url()
105 assert target_clone_url in response
105 assert target_clone_url in response
106
106
107 assert 'class="pull-request-merge"' in response
107 assert 'class="pull-request-merge"' in response
108 assert (
108 assert (
109 'Server-side pull request merging is disabled.'
109 'Server-side pull request merging is disabled.'
110 in response) != pr_merge_enabled
110 in response) != pr_merge_enabled
111
111
112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
112 def test_close_status_visibility(self, pr_util, user_util, csrf_token):
113 # Logout
113 # Logout
114 response = self.app.post(
114 response = self.app.post(
115 h.route_path('logout'),
115 h.route_path('logout'),
116 params={'csrf_token': csrf_token})
116 params={'csrf_token': csrf_token})
117 # Login as regular user
117 # Login as regular user
118 response = self.app.post(h.route_path('login'),
118 response = self.app.post(h.route_path('login'),
119 {'username': TEST_USER_REGULAR_LOGIN,
119 {'username': TEST_USER_REGULAR_LOGIN,
120 'password': 'test12'})
120 'password': 'test12'})
121
121
122 pull_request = pr_util.create_pull_request(
122 pull_request = pr_util.create_pull_request(
123 author=TEST_USER_REGULAR_LOGIN)
123 author=TEST_USER_REGULAR_LOGIN)
124
124
125 response = self.app.get(route_path(
125 response = self.app.get(route_path(
126 'pullrequest_show',
126 'pullrequest_show',
127 repo_name=pull_request.target_repo.scm_instance().name,
127 repo_name=pull_request.target_repo.scm_instance().name,
128 pull_request_id=pull_request.pull_request_id))
128 pull_request_id=pull_request.pull_request_id))
129
129
130 response.mustcontain('Server-side pull request merging is disabled.')
130 response.mustcontain('Server-side pull request merging is disabled.')
131
131
132 assert_response = response.assert_response()
132 assert_response = response.assert_response()
133 # for a regular user without merge permissions, we don't see it
133 # for a regular user without merge permissions, we don't see it
134 assert_response.no_element_exists('#close-pull-request-action')
134 assert_response.no_element_exists('#close-pull-request-action')
135
135
136 user_util.grant_user_permission_to_repo(
136 user_util.grant_user_permission_to_repo(
137 pull_request.target_repo,
137 pull_request.target_repo,
138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
138 UserModel().get_by_username(TEST_USER_REGULAR_LOGIN),
139 'repository.write')
139 'repository.write')
140 response = self.app.get(route_path(
140 response = self.app.get(route_path(
141 'pullrequest_show',
141 'pullrequest_show',
142 repo_name=pull_request.target_repo.scm_instance().name,
142 repo_name=pull_request.target_repo.scm_instance().name,
143 pull_request_id=pull_request.pull_request_id))
143 pull_request_id=pull_request.pull_request_id))
144
144
145 response.mustcontain('Server-side pull request merging is disabled.')
145 response.mustcontain('Server-side pull request merging is disabled.')
146
146
147 assert_response = response.assert_response()
147 assert_response = response.assert_response()
148 # now the regular user has merge permissions, so we see the CLOSE button
148 # now the regular user has merge permissions, so we see the CLOSE button
149 assert_response.one_element_exists('#close-pull-request-action')
149 assert_response.one_element_exists('#close-pull-request-action')
150
150
151 def test_show_invalid_commit_id(self, pr_util):
151 def test_show_invalid_commit_id(self, pr_util):
152 # Simulating invalid revisions which will cause a lookup error
152 # Simulating invalid revisions which will cause a lookup error
153 pull_request = pr_util.create_pull_request()
153 pull_request = pr_util.create_pull_request()
154 pull_request.revisions = ['invalid']
154 pull_request.revisions = ['invalid']
155 Session().add(pull_request)
155 Session().add(pull_request)
156 Session().commit()
156 Session().commit()
157
157
158 response = self.app.get(route_path(
158 response = self.app.get(route_path(
159 'pullrequest_show',
159 'pullrequest_show',
160 repo_name=pull_request.target_repo.scm_instance().name,
160 repo_name=pull_request.target_repo.scm_instance().name,
161 pull_request_id=pull_request.pull_request_id))
161 pull_request_id=pull_request.pull_request_id))
162
162
163 for commit_id in pull_request.revisions:
163 for commit_id in pull_request.revisions:
164 response.mustcontain(commit_id)
164 response.mustcontain(commit_id)
165
165
166 def test_show_invalid_source_reference(self, pr_util):
166 def test_show_invalid_source_reference(self, pr_util):
167 pull_request = pr_util.create_pull_request()
167 pull_request = pr_util.create_pull_request()
168 pull_request.source_ref = 'branch:b:invalid'
168 pull_request.source_ref = 'branch:b:invalid'
169 Session().add(pull_request)
169 Session().add(pull_request)
170 Session().commit()
170 Session().commit()
171
171
172 self.app.get(route_path(
172 self.app.get(route_path(
173 'pullrequest_show',
173 'pullrequest_show',
174 repo_name=pull_request.target_repo.scm_instance().name,
174 repo_name=pull_request.target_repo.scm_instance().name,
175 pull_request_id=pull_request.pull_request_id))
175 pull_request_id=pull_request.pull_request_id))
176
176
177 def test_edit_title_description(self, pr_util, csrf_token):
177 def test_edit_title_description(self, pr_util, csrf_token):
178 pull_request = pr_util.create_pull_request()
178 pull_request = pr_util.create_pull_request()
179 pull_request_id = pull_request.pull_request_id
179 pull_request_id = pull_request.pull_request_id
180
180
181 response = self.app.post(
181 response = self.app.post(
182 route_path('pullrequest_update',
182 route_path('pullrequest_update',
183 repo_name=pull_request.target_repo.repo_name,
183 repo_name=pull_request.target_repo.repo_name,
184 pull_request_id=pull_request_id),
184 pull_request_id=pull_request_id),
185 params={
185 params={
186 'edit_pull_request': 'true',
186 'edit_pull_request': 'true',
187 'title': 'New title',
187 'title': 'New title',
188 'description': 'New description',
188 'description': 'New description',
189 'csrf_token': csrf_token})
189 'csrf_token': csrf_token})
190
190
191 assert_session_flash(
191 assert_session_flash(
192 response, u'Pull request title & description updated.',
192 response, u'Pull request title & description updated.',
193 category='success')
193 category='success')
194
194
195 pull_request = PullRequest.get(pull_request_id)
195 pull_request = PullRequest.get(pull_request_id)
196 assert pull_request.title == 'New title'
196 assert pull_request.title == 'New title'
197 assert pull_request.description == 'New description'
197 assert pull_request.description == 'New description'
198
198
199 def test_edit_title_description_closed(self, pr_util, csrf_token):
199 def test_edit_title_description_closed(self, pr_util, csrf_token):
200 pull_request = pr_util.create_pull_request()
200 pull_request = pr_util.create_pull_request()
201 pull_request_id = pull_request.pull_request_id
201 pull_request_id = pull_request.pull_request_id
202 repo_name = pull_request.target_repo.repo_name
202 repo_name = pull_request.target_repo.repo_name
203 pr_util.close()
203 pr_util.close()
204
204
205 response = self.app.post(
205 response = self.app.post(
206 route_path('pullrequest_update',
206 route_path('pullrequest_update',
207 repo_name=repo_name, pull_request_id=pull_request_id),
207 repo_name=repo_name, pull_request_id=pull_request_id),
208 params={
208 params={
209 'edit_pull_request': 'true',
209 'edit_pull_request': 'true',
210 'title': 'New title',
210 'title': 'New title',
211 'description': 'New description',
211 'description': 'New description',
212 'csrf_token': csrf_token}, status=200)
212 'csrf_token': csrf_token}, status=200)
213 assert_session_flash(
213 assert_session_flash(
214 response, u'Cannot update closed pull requests.',
214 response, u'Cannot update closed pull requests.',
215 category='error')
215 category='error')
216
216
217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
217 def test_update_invalid_source_reference(self, pr_util, csrf_token):
218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
218 from rhodecode.lib.vcs.backends.base import UpdateFailureReason
219
219
220 pull_request = pr_util.create_pull_request()
220 pull_request = pr_util.create_pull_request()
221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
221 pull_request.source_ref = 'branch:invalid-branch:invalid-commit-id'
222 Session().add(pull_request)
222 Session().add(pull_request)
223 Session().commit()
223 Session().commit()
224
224
225 pull_request_id = pull_request.pull_request_id
225 pull_request_id = pull_request.pull_request_id
226
226
227 response = self.app.post(
227 response = self.app.post(
228 route_path('pullrequest_update',
228 route_path('pullrequest_update',
229 repo_name=pull_request.target_repo.repo_name,
229 repo_name=pull_request.target_repo.repo_name,
230 pull_request_id=pull_request_id),
230 pull_request_id=pull_request_id),
231 params={'update_commits': 'true',
231 params={'update_commits': 'true',
232 'csrf_token': csrf_token})
232 'csrf_token': csrf_token})
233
233
234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
234 expected_msg = str(PullRequestModel.UPDATE_STATUS_MESSAGES[
235 UpdateFailureReason.MISSING_SOURCE_REF])
235 UpdateFailureReason.MISSING_SOURCE_REF])
236 assert_session_flash(response, expected_msg, category='error')
236 assert_session_flash(response, expected_msg, category='error')
237
237
238 def test_missing_target_reference(self, pr_util, csrf_token):
238 def test_missing_target_reference(self, pr_util, csrf_token):
239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
239 from rhodecode.lib.vcs.backends.base import MergeFailureReason
240 pull_request = pr_util.create_pull_request(
240 pull_request = pr_util.create_pull_request(
241 approved=True, mergeable=True)
241 approved=True, mergeable=True)
242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
242 pull_request.target_ref = 'branch:invalid-branch:invalid-commit-id'
243 Session().add(pull_request)
243 Session().add(pull_request)
244 Session().commit()
244 Session().commit()
245
245
246 pull_request_id = pull_request.pull_request_id
246 pull_request_id = pull_request.pull_request_id
247 pull_request_url = route_path(
247 pull_request_url = route_path(
248 'pullrequest_show',
248 'pullrequest_show',
249 repo_name=pull_request.target_repo.repo_name,
249 repo_name=pull_request.target_repo.repo_name,
250 pull_request_id=pull_request_id)
250 pull_request_id=pull_request_id)
251
251
252 response = self.app.get(pull_request_url)
252 response = self.app.get(pull_request_url)
253
253
254 assertr = AssertResponse(response)
254 assertr = AssertResponse(response)
255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
255 expected_msg = PullRequestModel.MERGE_STATUS_MESSAGES[
256 MergeFailureReason.MISSING_TARGET_REF]
256 MergeFailureReason.MISSING_TARGET_REF]
257 assertr.element_contains(
257 assertr.element_contains(
258 'span[data-role="merge-message"]', str(expected_msg))
258 'span[data-role="merge-message"]', str(expected_msg))
259
259
260 def test_comment_and_close_pull_request_custom_message_approved(
260 def test_comment_and_close_pull_request_custom_message_approved(
261 self, pr_util, csrf_token, xhr_header):
261 self, pr_util, csrf_token, xhr_header):
262
262
263 pull_request = pr_util.create_pull_request(approved=True)
263 pull_request = pr_util.create_pull_request(approved=True)
264 pull_request_id = pull_request.pull_request_id
264 pull_request_id = pull_request.pull_request_id
265 author = pull_request.user_id
265 author = pull_request.user_id
266 repo = pull_request.target_repo.repo_id
266 repo = pull_request.target_repo.repo_id
267
267
268 self.app.post(
268 self.app.post(
269 route_path('pullrequest_comment_create',
269 route_path('pullrequest_comment_create',
270 repo_name=pull_request.target_repo.scm_instance().name,
270 repo_name=pull_request.target_repo.scm_instance().name,
271 pull_request_id=pull_request_id),
271 pull_request_id=pull_request_id),
272 params={
272 params={
273 'close_pull_request': '1',
273 'close_pull_request': '1',
274 'text': 'Closing a PR',
274 'text': 'Closing a PR',
275 'csrf_token': csrf_token},
275 'csrf_token': csrf_token},
276 extra_environ=xhr_header,)
276 extra_environ=xhr_header,)
277
277
278 journal = UserLog.query()\
278 journal = UserLog.query()\
279 .filter(UserLog.user_id == author)\
279 .filter(UserLog.user_id == author)\
280 .filter(UserLog.repository_id == repo) \
280 .filter(UserLog.repository_id == repo) \
281 .order_by('user_log_id') \
281 .order_by('user_log_id') \
282 .all()
282 .all()
283 assert journal[-1].action == 'repo.pull_request.close'
283 assert journal[-1].action == 'repo.pull_request.close'
284
284
285 pull_request = PullRequest.get(pull_request_id)
285 pull_request = PullRequest.get(pull_request_id)
286 assert pull_request.is_closed()
286 assert pull_request.is_closed()
287
287
288 status = ChangesetStatusModel().get_status(
288 status = ChangesetStatusModel().get_status(
289 pull_request.source_repo, pull_request=pull_request)
289 pull_request.source_repo, pull_request=pull_request)
290 assert status == ChangesetStatus.STATUS_APPROVED
290 assert status == ChangesetStatus.STATUS_APPROVED
291 comments = ChangesetComment().query() \
291 comments = ChangesetComment().query() \
292 .filter(ChangesetComment.pull_request == pull_request) \
292 .filter(ChangesetComment.pull_request == pull_request) \
293 .order_by(ChangesetComment.comment_id.asc())\
293 .order_by(ChangesetComment.comment_id.asc())\
294 .all()
294 .all()
295 assert comments[-1].text == 'Closing a PR'
295 assert comments[-1].text == 'Closing a PR'
296
296
297 def test_comment_force_close_pull_request_rejected(
297 def test_comment_force_close_pull_request_rejected(
298 self, pr_util, csrf_token, xhr_header):
298 self, pr_util, csrf_token, xhr_header):
299 pull_request = pr_util.create_pull_request()
299 pull_request = pr_util.create_pull_request()
300 pull_request_id = pull_request.pull_request_id
300 pull_request_id = pull_request.pull_request_id
301 PullRequestModel().update_reviewers(
301 PullRequestModel().update_reviewers(
302 pull_request_id, [(1, ['reason'], False), (2, ['reason2'], False)],
302 pull_request_id, [(1, ['reason'], False, []), (2, ['reason2'], False, [])],
303 pull_request.author)
303 pull_request.author)
304 author = pull_request.user_id
304 author = pull_request.user_id
305 repo = pull_request.target_repo.repo_id
305 repo = pull_request.target_repo.repo_id
306
306
307 self.app.post(
307 self.app.post(
308 route_path('pullrequest_comment_create',
308 route_path('pullrequest_comment_create',
309 repo_name=pull_request.target_repo.scm_instance().name,
309 repo_name=pull_request.target_repo.scm_instance().name,
310 pull_request_id=pull_request_id),
310 pull_request_id=pull_request_id),
311 params={
311 params={
312 'close_pull_request': '1',
312 'close_pull_request': '1',
313 'csrf_token': csrf_token},
313 'csrf_token': csrf_token},
314 extra_environ=xhr_header)
314 extra_environ=xhr_header)
315
315
316 pull_request = PullRequest.get(pull_request_id)
316 pull_request = PullRequest.get(pull_request_id)
317
317
318 journal = UserLog.query()\
318 journal = UserLog.query()\
319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
319 .filter(UserLog.user_id == author, UserLog.repository_id == repo) \
320 .order_by('user_log_id') \
320 .order_by('user_log_id') \
321 .all()
321 .all()
322 assert journal[-1].action == 'repo.pull_request.close'
322 assert journal[-1].action == 'repo.pull_request.close'
323
323
324 # check only the latest status, not the review status
324 # check only the latest status, not the review status
325 status = ChangesetStatusModel().get_status(
325 status = ChangesetStatusModel().get_status(
326 pull_request.source_repo, pull_request=pull_request)
326 pull_request.source_repo, pull_request=pull_request)
327 assert status == ChangesetStatus.STATUS_REJECTED
327 assert status == ChangesetStatus.STATUS_REJECTED
328
328
329 def test_comment_and_close_pull_request(
329 def test_comment_and_close_pull_request(
330 self, pr_util, csrf_token, xhr_header):
330 self, pr_util, csrf_token, xhr_header):
331 pull_request = pr_util.create_pull_request()
331 pull_request = pr_util.create_pull_request()
332 pull_request_id = pull_request.pull_request_id
332 pull_request_id = pull_request.pull_request_id
333
333
334 response = self.app.post(
334 response = self.app.post(
335 route_path('pullrequest_comment_create',
335 route_path('pullrequest_comment_create',
336 repo_name=pull_request.target_repo.scm_instance().name,
336 repo_name=pull_request.target_repo.scm_instance().name,
337 pull_request_id=pull_request.pull_request_id),
337 pull_request_id=pull_request.pull_request_id),
338 params={
338 params={
339 'close_pull_request': 'true',
339 'close_pull_request': 'true',
340 'csrf_token': csrf_token},
340 'csrf_token': csrf_token},
341 extra_environ=xhr_header)
341 extra_environ=xhr_header)
342
342
343 assert response.json
343 assert response.json
344
344
345 pull_request = PullRequest.get(pull_request_id)
345 pull_request = PullRequest.get(pull_request_id)
346 assert pull_request.is_closed()
346 assert pull_request.is_closed()
347
347
348 # check only the latest status, not the review status
348 # check only the latest status, not the review status
349 status = ChangesetStatusModel().get_status(
349 status = ChangesetStatusModel().get_status(
350 pull_request.source_repo, pull_request=pull_request)
350 pull_request.source_repo, pull_request=pull_request)
351 assert status == ChangesetStatus.STATUS_REJECTED
351 assert status == ChangesetStatus.STATUS_REJECTED
352
352
353 def test_create_pull_request(self, backend, csrf_token):
353 def test_create_pull_request(self, backend, csrf_token):
354 commits = [
354 commits = [
355 {'message': 'ancestor'},
355 {'message': 'ancestor'},
356 {'message': 'change'},
356 {'message': 'change'},
357 {'message': 'change2'},
357 {'message': 'change2'},
358 ]
358 ]
359 commit_ids = backend.create_master_repo(commits)
359 commit_ids = backend.create_master_repo(commits)
360 target = backend.create_repo(heads=['ancestor'])
360 target = backend.create_repo(heads=['ancestor'])
361 source = backend.create_repo(heads=['change2'])
361 source = backend.create_repo(heads=['change2'])
362
362
363 response = self.app.post(
363 response = self.app.post(
364 route_path('pullrequest_create', repo_name=source.repo_name),
364 route_path('pullrequest_create', repo_name=source.repo_name),
365 [
365 [
366 ('source_repo', source.repo_name),
366 ('source_repo', source.repo_name),
367 ('source_ref', 'branch:default:' + commit_ids['change2']),
367 ('source_ref', 'branch:default:' + commit_ids['change2']),
368 ('target_repo', target.repo_name),
368 ('target_repo', target.repo_name),
369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
369 ('target_ref', 'branch:default:' + commit_ids['ancestor']),
370 ('common_ancestor', commit_ids['ancestor']),
370 ('common_ancestor', commit_ids['ancestor']),
371 ('pullrequest_desc', 'Description'),
371 ('pullrequest_desc', 'Description'),
372 ('pullrequest_title', 'Title'),
372 ('pullrequest_title', 'Title'),
373 ('__start__', 'review_members:sequence'),
373 ('__start__', 'review_members:sequence'),
374 ('__start__', 'reviewer:mapping'),
374 ('__start__', 'reviewer:mapping'),
375 ('user_id', '1'),
375 ('user_id', '1'),
376 ('__start__', 'reasons:sequence'),
376 ('__start__', 'reasons:sequence'),
377 ('reason', 'Some reason'),
377 ('reason', 'Some reason'),
378 ('__end__', 'reasons:sequence'),
378 ('__end__', 'reasons:sequence'),
379 ('__start__', 'rules:sequence'),
380 ('__end__', 'rules:sequence'),
379 ('mandatory', 'False'),
381 ('mandatory', 'False'),
380 ('__end__', 'reviewer:mapping'),
382 ('__end__', 'reviewer:mapping'),
381 ('__end__', 'review_members:sequence'),
383 ('__end__', 'review_members:sequence'),
382 ('__start__', 'revisions:sequence'),
384 ('__start__', 'revisions:sequence'),
383 ('revisions', commit_ids['change']),
385 ('revisions', commit_ids['change']),
384 ('revisions', commit_ids['change2']),
386 ('revisions', commit_ids['change2']),
385 ('__end__', 'revisions:sequence'),
387 ('__end__', 'revisions:sequence'),
386 ('user', ''),
388 ('user', ''),
387 ('csrf_token', csrf_token),
389 ('csrf_token', csrf_token),
388 ],
390 ],
389 status=302)
391 status=302)
390
392
391 location = response.headers['Location']
393 location = response.headers['Location']
392 pull_request_id = location.rsplit('/', 1)[1]
394 pull_request_id = location.rsplit('/', 1)[1]
393 assert pull_request_id != 'new'
395 assert pull_request_id != 'new'
394 pull_request = PullRequest.get(int(pull_request_id))
396 pull_request = PullRequest.get(int(pull_request_id))
395
397
396 # check that we now have both revisions
398 # check that we now have both revisions
397 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
399 assert pull_request.revisions == [commit_ids['change2'], commit_ids['change']]
398 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
400 assert pull_request.source_ref == 'branch:default:' + commit_ids['change2']
399 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
401 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
400 assert pull_request.target_ref == expected_target_ref
402 assert pull_request.target_ref == expected_target_ref
401
403
402 def test_reviewer_notifications(self, backend, csrf_token):
404 def test_reviewer_notifications(self, backend, csrf_token):
403 # We have to use the app.post for this test so it will create the
405 # We have to use the app.post for this test so it will create the
404 # notifications properly with the new PR
406 # notifications properly with the new PR
405 commits = [
407 commits = [
406 {'message': 'ancestor',
408 {'message': 'ancestor',
407 'added': [FileNode('file_A', content='content_of_ancestor')]},
409 'added': [FileNode('file_A', content='content_of_ancestor')]},
408 {'message': 'change',
410 {'message': 'change',
409 'added': [FileNode('file_a', content='content_of_change')]},
411 'added': [FileNode('file_a', content='content_of_change')]},
410 {'message': 'change-child'},
412 {'message': 'change-child'},
411 {'message': 'ancestor-child', 'parents': ['ancestor'],
413 {'message': 'ancestor-child', 'parents': ['ancestor'],
412 'added': [
414 'added': [
413 FileNode('file_B', content='content_of_ancestor_child')]},
415 FileNode('file_B', content='content_of_ancestor_child')]},
414 {'message': 'ancestor-child-2'},
416 {'message': 'ancestor-child-2'},
415 ]
417 ]
416 commit_ids = backend.create_master_repo(commits)
418 commit_ids = backend.create_master_repo(commits)
417 target = backend.create_repo(heads=['ancestor-child'])
419 target = backend.create_repo(heads=['ancestor-child'])
418 source = backend.create_repo(heads=['change'])
420 source = backend.create_repo(heads=['change'])
419
421
420 response = self.app.post(
422 response = self.app.post(
421 route_path('pullrequest_create', repo_name=source.repo_name),
423 route_path('pullrequest_create', repo_name=source.repo_name),
422 [
424 [
423 ('source_repo', source.repo_name),
425 ('source_repo', source.repo_name),
424 ('source_ref', 'branch:default:' + commit_ids['change']),
426 ('source_ref', 'branch:default:' + commit_ids['change']),
425 ('target_repo', target.repo_name),
427 ('target_repo', target.repo_name),
426 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
428 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
427 ('common_ancestor', commit_ids['ancestor']),
429 ('common_ancestor', commit_ids['ancestor']),
428 ('pullrequest_desc', 'Description'),
430 ('pullrequest_desc', 'Description'),
429 ('pullrequest_title', 'Title'),
431 ('pullrequest_title', 'Title'),
430 ('__start__', 'review_members:sequence'),
432 ('__start__', 'review_members:sequence'),
431 ('__start__', 'reviewer:mapping'),
433 ('__start__', 'reviewer:mapping'),
432 ('user_id', '2'),
434 ('user_id', '2'),
433 ('__start__', 'reasons:sequence'),
435 ('__start__', 'reasons:sequence'),
434 ('reason', 'Some reason'),
436 ('reason', 'Some reason'),
435 ('__end__', 'reasons:sequence'),
437 ('__end__', 'reasons:sequence'),
438 ('__start__', 'rules:sequence'),
439 ('__end__', 'rules:sequence'),
436 ('mandatory', 'False'),
440 ('mandatory', 'False'),
437 ('__end__', 'reviewer:mapping'),
441 ('__end__', 'reviewer:mapping'),
438 ('__end__', 'review_members:sequence'),
442 ('__end__', 'review_members:sequence'),
439 ('__start__', 'revisions:sequence'),
443 ('__start__', 'revisions:sequence'),
440 ('revisions', commit_ids['change']),
444 ('revisions', commit_ids['change']),
441 ('__end__', 'revisions:sequence'),
445 ('__end__', 'revisions:sequence'),
442 ('user', ''),
446 ('user', ''),
443 ('csrf_token', csrf_token),
447 ('csrf_token', csrf_token),
444 ],
448 ],
445 status=302)
449 status=302)
446
450
447 location = response.headers['Location']
451 location = response.headers['Location']
448
452
449 pull_request_id = location.rsplit('/', 1)[1]
453 pull_request_id = location.rsplit('/', 1)[1]
450 assert pull_request_id != 'new'
454 assert pull_request_id != 'new'
451 pull_request = PullRequest.get(int(pull_request_id))
455 pull_request = PullRequest.get(int(pull_request_id))
452
456
453 # Check that a notification was made
457 # Check that a notification was made
454 notifications = Notification.query()\
458 notifications = Notification.query()\
455 .filter(Notification.created_by == pull_request.author.user_id,
459 .filter(Notification.created_by == pull_request.author.user_id,
456 Notification.type_ == Notification.TYPE_PULL_REQUEST,
460 Notification.type_ == Notification.TYPE_PULL_REQUEST,
457 Notification.subject.contains(
461 Notification.subject.contains(
458 "wants you to review pull request #%s" % pull_request_id))
462 "wants you to review pull request #%s" % pull_request_id))
459 assert len(notifications.all()) == 1
463 assert len(notifications.all()) == 1
460
464
461 # Change reviewers and check that a notification was made
465 # Change reviewers and check that a notification was made
462 PullRequestModel().update_reviewers(
466 PullRequestModel().update_reviewers(
463 pull_request.pull_request_id, [(1, [], False)],
467 pull_request.pull_request_id, [(1, [], False, [])],
464 pull_request.author)
468 pull_request.author)
465 assert len(notifications.all()) == 2
469 assert len(notifications.all()) == 2
466
470
467 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
471 def test_create_pull_request_stores_ancestor_commit_id(self, backend,
468 csrf_token):
472 csrf_token):
469 commits = [
473 commits = [
470 {'message': 'ancestor',
474 {'message': 'ancestor',
471 'added': [FileNode('file_A', content='content_of_ancestor')]},
475 'added': [FileNode('file_A', content='content_of_ancestor')]},
472 {'message': 'change',
476 {'message': 'change',
473 'added': [FileNode('file_a', content='content_of_change')]},
477 'added': [FileNode('file_a', content='content_of_change')]},
474 {'message': 'change-child'},
478 {'message': 'change-child'},
475 {'message': 'ancestor-child', 'parents': ['ancestor'],
479 {'message': 'ancestor-child', 'parents': ['ancestor'],
476 'added': [
480 'added': [
477 FileNode('file_B', content='content_of_ancestor_child')]},
481 FileNode('file_B', content='content_of_ancestor_child')]},
478 {'message': 'ancestor-child-2'},
482 {'message': 'ancestor-child-2'},
479 ]
483 ]
480 commit_ids = backend.create_master_repo(commits)
484 commit_ids = backend.create_master_repo(commits)
481 target = backend.create_repo(heads=['ancestor-child'])
485 target = backend.create_repo(heads=['ancestor-child'])
482 source = backend.create_repo(heads=['change'])
486 source = backend.create_repo(heads=['change'])
483
487
484 response = self.app.post(
488 response = self.app.post(
485 route_path('pullrequest_create', repo_name=source.repo_name),
489 route_path('pullrequest_create', repo_name=source.repo_name),
486 [
490 [
487 ('source_repo', source.repo_name),
491 ('source_repo', source.repo_name),
488 ('source_ref', 'branch:default:' + commit_ids['change']),
492 ('source_ref', 'branch:default:' + commit_ids['change']),
489 ('target_repo', target.repo_name),
493 ('target_repo', target.repo_name),
490 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
494 ('target_ref', 'branch:default:' + commit_ids['ancestor-child']),
491 ('common_ancestor', commit_ids['ancestor']),
495 ('common_ancestor', commit_ids['ancestor']),
492 ('pullrequest_desc', 'Description'),
496 ('pullrequest_desc', 'Description'),
493 ('pullrequest_title', 'Title'),
497 ('pullrequest_title', 'Title'),
494 ('__start__', 'review_members:sequence'),
498 ('__start__', 'review_members:sequence'),
495 ('__start__', 'reviewer:mapping'),
499 ('__start__', 'reviewer:mapping'),
496 ('user_id', '1'),
500 ('user_id', '1'),
497 ('__start__', 'reasons:sequence'),
501 ('__start__', 'reasons:sequence'),
498 ('reason', 'Some reason'),
502 ('reason', 'Some reason'),
499 ('__end__', 'reasons:sequence'),
503 ('__end__', 'reasons:sequence'),
504 ('__start__', 'rules:sequence'),
505 ('__end__', 'rules:sequence'),
500 ('mandatory', 'False'),
506 ('mandatory', 'False'),
501 ('__end__', 'reviewer:mapping'),
507 ('__end__', 'reviewer:mapping'),
502 ('__end__', 'review_members:sequence'),
508 ('__end__', 'review_members:sequence'),
503 ('__start__', 'revisions:sequence'),
509 ('__start__', 'revisions:sequence'),
504 ('revisions', commit_ids['change']),
510 ('revisions', commit_ids['change']),
505 ('__end__', 'revisions:sequence'),
511 ('__end__', 'revisions:sequence'),
506 ('user', ''),
512 ('user', ''),
507 ('csrf_token', csrf_token),
513 ('csrf_token', csrf_token),
508 ],
514 ],
509 status=302)
515 status=302)
510
516
511 location = response.headers['Location']
517 location = response.headers['Location']
512
518
513 pull_request_id = location.rsplit('/', 1)[1]
519 pull_request_id = location.rsplit('/', 1)[1]
514 assert pull_request_id != 'new'
520 assert pull_request_id != 'new'
515 pull_request = PullRequest.get(int(pull_request_id))
521 pull_request = PullRequest.get(int(pull_request_id))
516
522
517 # target_ref has to point to the ancestor's commit_id in order to
523 # target_ref has to point to the ancestor's commit_id in order to
518 # show the correct diff
524 # show the correct diff
519 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
525 expected_target_ref = 'branch:default:' + commit_ids['ancestor']
520 assert pull_request.target_ref == expected_target_ref
526 assert pull_request.target_ref == expected_target_ref
521
527
522 # Check generated diff contents
528 # Check generated diff contents
523 response = response.follow()
529 response = response.follow()
524 assert 'content_of_ancestor' not in response.body
530 assert 'content_of_ancestor' not in response.body
525 assert 'content_of_ancestor-child' not in response.body
531 assert 'content_of_ancestor-child' not in response.body
526 assert 'content_of_change' in response.body
532 assert 'content_of_change' in response.body
527
533
528 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
534 def test_merge_pull_request_enabled(self, pr_util, csrf_token):
529 # Clear any previous calls to rcextensions
535 # Clear any previous calls to rcextensions
530 rhodecode.EXTENSIONS.calls.clear()
536 rhodecode.EXTENSIONS.calls.clear()
531
537
532 pull_request = pr_util.create_pull_request(
538 pull_request = pr_util.create_pull_request(
533 approved=True, mergeable=True)
539 approved=True, mergeable=True)
534 pull_request_id = pull_request.pull_request_id
540 pull_request_id = pull_request.pull_request_id
535 repo_name = pull_request.target_repo.scm_instance().name,
541 repo_name = pull_request.target_repo.scm_instance().name,
536
542
537 response = self.app.post(
543 response = self.app.post(
538 route_path('pullrequest_merge',
544 route_path('pullrequest_merge',
539 repo_name=str(repo_name[0]),
545 repo_name=str(repo_name[0]),
540 pull_request_id=pull_request_id),
546 pull_request_id=pull_request_id),
541 params={'csrf_token': csrf_token}).follow()
547 params={'csrf_token': csrf_token}).follow()
542
548
543 pull_request = PullRequest.get(pull_request_id)
549 pull_request = PullRequest.get(pull_request_id)
544
550
545 assert response.status_int == 200
551 assert response.status_int == 200
546 assert pull_request.is_closed()
552 assert pull_request.is_closed()
547 assert_pull_request_status(
553 assert_pull_request_status(
548 pull_request, ChangesetStatus.STATUS_APPROVED)
554 pull_request, ChangesetStatus.STATUS_APPROVED)
549
555
550 # Check the relevant log entries were added
556 # Check the relevant log entries were added
551 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
557 user_logs = UserLog.query().order_by('-user_log_id').limit(3)
552 actions = [log.action for log in user_logs]
558 actions = [log.action for log in user_logs]
553 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
559 pr_commit_ids = PullRequestModel()._get_commit_ids(pull_request)
554 expected_actions = [
560 expected_actions = [
555 u'repo.pull_request.close',
561 u'repo.pull_request.close',
556 u'repo.pull_request.merge',
562 u'repo.pull_request.merge',
557 u'repo.pull_request.comment.create'
563 u'repo.pull_request.comment.create'
558 ]
564 ]
559 assert actions == expected_actions
565 assert actions == expected_actions
560
566
561 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
567 user_logs = UserLog.query().order_by('-user_log_id').limit(4)
562 actions = [log for log in user_logs]
568 actions = [log for log in user_logs]
563 assert actions[-1].action == 'user.push'
569 assert actions[-1].action == 'user.push'
564 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
570 assert actions[-1].action_data['commit_ids'] == pr_commit_ids
565
571
566 # Check post_push rcextension was really executed
572 # Check post_push rcextension was really executed
567 push_calls = rhodecode.EXTENSIONS.calls['post_push']
573 push_calls = rhodecode.EXTENSIONS.calls['post_push']
568 assert len(push_calls) == 1
574 assert len(push_calls) == 1
569 unused_last_call_args, last_call_kwargs = push_calls[0]
575 unused_last_call_args, last_call_kwargs = push_calls[0]
570 assert last_call_kwargs['action'] == 'push'
576 assert last_call_kwargs['action'] == 'push'
571 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
577 assert last_call_kwargs['pushed_revs'] == pr_commit_ids
572
578
573 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
579 def test_merge_pull_request_disabled(self, pr_util, csrf_token):
574 pull_request = pr_util.create_pull_request(mergeable=False)
580 pull_request = pr_util.create_pull_request(mergeable=False)
575 pull_request_id = pull_request.pull_request_id
581 pull_request_id = pull_request.pull_request_id
576 pull_request = PullRequest.get(pull_request_id)
582 pull_request = PullRequest.get(pull_request_id)
577
583
578 response = self.app.post(
584 response = self.app.post(
579 route_path('pullrequest_merge',
585 route_path('pullrequest_merge',
580 repo_name=pull_request.target_repo.scm_instance().name,
586 repo_name=pull_request.target_repo.scm_instance().name,
581 pull_request_id=pull_request.pull_request_id),
587 pull_request_id=pull_request.pull_request_id),
582 params={'csrf_token': csrf_token}).follow()
588 params={'csrf_token': csrf_token}).follow()
583
589
584 assert response.status_int == 200
590 assert response.status_int == 200
585 response.mustcontain(
591 response.mustcontain(
586 'Merge is not currently possible because of below failed checks.')
592 'Merge is not currently possible because of below failed checks.')
587 response.mustcontain('Server-side pull request merging is disabled.')
593 response.mustcontain('Server-side pull request merging is disabled.')
588
594
589 @pytest.mark.skip_backends('svn')
595 @pytest.mark.skip_backends('svn')
590 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
596 def test_merge_pull_request_not_approved(self, pr_util, csrf_token):
591 pull_request = pr_util.create_pull_request(mergeable=True)
597 pull_request = pr_util.create_pull_request(mergeable=True)
592 pull_request_id = pull_request.pull_request_id
598 pull_request_id = pull_request.pull_request_id
593 repo_name = pull_request.target_repo.scm_instance().name
599 repo_name = pull_request.target_repo.scm_instance().name
594
600
595 response = self.app.post(
601 response = self.app.post(
596 route_path('pullrequest_merge',
602 route_path('pullrequest_merge',
597 repo_name=repo_name,
603 repo_name=repo_name,
598 pull_request_id=pull_request_id),
604 pull_request_id=pull_request_id),
599 params={'csrf_token': csrf_token}).follow()
605 params={'csrf_token': csrf_token}).follow()
600
606
601 assert response.status_int == 200
607 assert response.status_int == 200
602
608
603 response.mustcontain(
609 response.mustcontain(
604 'Merge is not currently possible because of below failed checks.')
610 'Merge is not currently possible because of below failed checks.')
605 response.mustcontain('Pull request reviewer approval is pending.')
611 response.mustcontain('Pull request reviewer approval is pending.')
606
612
607 def test_merge_pull_request_renders_failure_reason(
613 def test_merge_pull_request_renders_failure_reason(
608 self, user_regular, csrf_token, pr_util):
614 self, user_regular, csrf_token, pr_util):
609 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
615 pull_request = pr_util.create_pull_request(mergeable=True, approved=True)
610 pull_request_id = pull_request.pull_request_id
616 pull_request_id = pull_request.pull_request_id
611 repo_name = pull_request.target_repo.scm_instance().name
617 repo_name = pull_request.target_repo.scm_instance().name
612
618
613 model_patcher = mock.patch.multiple(
619 model_patcher = mock.patch.multiple(
614 PullRequestModel,
620 PullRequestModel,
615 merge=mock.Mock(return_value=MergeResponse(
621 merge=mock.Mock(return_value=MergeResponse(
616 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
622 True, False, 'STUB_COMMIT_ID', MergeFailureReason.PUSH_FAILED)),
617 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
623 merge_status=mock.Mock(return_value=(True, 'WRONG_MESSAGE')))
618
624
619 with model_patcher:
625 with model_patcher:
620 response = self.app.post(
626 response = self.app.post(
621 route_path('pullrequest_merge',
627 route_path('pullrequest_merge',
622 repo_name=repo_name,
628 repo_name=repo_name,
623 pull_request_id=pull_request_id),
629 pull_request_id=pull_request_id),
624 params={'csrf_token': csrf_token}, status=302)
630 params={'csrf_token': csrf_token}, status=302)
625
631
626 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
632 assert_session_flash(response, PullRequestModel.MERGE_STATUS_MESSAGES[
627 MergeFailureReason.PUSH_FAILED])
633 MergeFailureReason.PUSH_FAILED])
628
634
629 def test_update_source_revision(self, backend, csrf_token):
635 def test_update_source_revision(self, backend, csrf_token):
630 commits = [
636 commits = [
631 {'message': 'ancestor'},
637 {'message': 'ancestor'},
632 {'message': 'change'},
638 {'message': 'change'},
633 {'message': 'change-2'},
639 {'message': 'change-2'},
634 ]
640 ]
635 commit_ids = backend.create_master_repo(commits)
641 commit_ids = backend.create_master_repo(commits)
636 target = backend.create_repo(heads=['ancestor'])
642 target = backend.create_repo(heads=['ancestor'])
637 source = backend.create_repo(heads=['change'])
643 source = backend.create_repo(heads=['change'])
638
644
639 # create pr from a in source to A in target
645 # create pr from a in source to A in target
640 pull_request = PullRequest()
646 pull_request = PullRequest()
641 pull_request.source_repo = source
647 pull_request.source_repo = source
642 # TODO: johbo: Make sure that we write the source ref this way!
648 # TODO: johbo: Make sure that we write the source ref this way!
643 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
649 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
644 branch=backend.default_branch_name, commit_id=commit_ids['change'])
650 branch=backend.default_branch_name, commit_id=commit_ids['change'])
645 pull_request.target_repo = target
651 pull_request.target_repo = target
646
652
647 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
653 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
648 branch=backend.default_branch_name,
654 branch=backend.default_branch_name,
649 commit_id=commit_ids['ancestor'])
655 commit_id=commit_ids['ancestor'])
650 pull_request.revisions = [commit_ids['change']]
656 pull_request.revisions = [commit_ids['change']]
651 pull_request.title = u"Test"
657 pull_request.title = u"Test"
652 pull_request.description = u"Description"
658 pull_request.description = u"Description"
653 pull_request.author = UserModel().get_by_username(
659 pull_request.author = UserModel().get_by_username(
654 TEST_USER_ADMIN_LOGIN)
660 TEST_USER_ADMIN_LOGIN)
655 Session().add(pull_request)
661 Session().add(pull_request)
656 Session().commit()
662 Session().commit()
657 pull_request_id = pull_request.pull_request_id
663 pull_request_id = pull_request.pull_request_id
658
664
659 # source has ancestor - change - change-2
665 # source has ancestor - change - change-2
660 backend.pull_heads(source, heads=['change-2'])
666 backend.pull_heads(source, heads=['change-2'])
661
667
662 # update PR
668 # update PR
663 self.app.post(
669 self.app.post(
664 route_path('pullrequest_update',
670 route_path('pullrequest_update',
665 repo_name=target.repo_name,
671 repo_name=target.repo_name,
666 pull_request_id=pull_request_id),
672 pull_request_id=pull_request_id),
667 params={'update_commits': 'true',
673 params={'update_commits': 'true',
668 'csrf_token': csrf_token})
674 'csrf_token': csrf_token})
669
675
670 # check that we now have both revisions
676 # check that we now have both revisions
671 pull_request = PullRequest.get(pull_request_id)
677 pull_request = PullRequest.get(pull_request_id)
672 assert pull_request.revisions == [
678 assert pull_request.revisions == [
673 commit_ids['change-2'], commit_ids['change']]
679 commit_ids['change-2'], commit_ids['change']]
674
680
675 # TODO: johbo: this should be a test on its own
681 # TODO: johbo: this should be a test on its own
676 response = self.app.get(route_path(
682 response = self.app.get(route_path(
677 'pullrequest_new',
683 'pullrequest_new',
678 repo_name=target.repo_name))
684 repo_name=target.repo_name))
679 assert response.status_int == 200
685 assert response.status_int == 200
680 assert 'Pull request updated to' in response.body
686 assert 'Pull request updated to' in response.body
681 assert 'with 1 added, 0 removed commits.' in response.body
687 assert 'with 1 added, 0 removed commits.' in response.body
682
688
683 def test_update_target_revision(self, backend, csrf_token):
689 def test_update_target_revision(self, backend, csrf_token):
684 commits = [
690 commits = [
685 {'message': 'ancestor'},
691 {'message': 'ancestor'},
686 {'message': 'change'},
692 {'message': 'change'},
687 {'message': 'ancestor-new', 'parents': ['ancestor']},
693 {'message': 'ancestor-new', 'parents': ['ancestor']},
688 {'message': 'change-rebased'},
694 {'message': 'change-rebased'},
689 ]
695 ]
690 commit_ids = backend.create_master_repo(commits)
696 commit_ids = backend.create_master_repo(commits)
691 target = backend.create_repo(heads=['ancestor'])
697 target = backend.create_repo(heads=['ancestor'])
692 source = backend.create_repo(heads=['change'])
698 source = backend.create_repo(heads=['change'])
693
699
694 # create pr from a in source to A in target
700 # create pr from a in source to A in target
695 pull_request = PullRequest()
701 pull_request = PullRequest()
696 pull_request.source_repo = source
702 pull_request.source_repo = source
697 # TODO: johbo: Make sure that we write the source ref this way!
703 # TODO: johbo: Make sure that we write the source ref this way!
698 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
704 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
699 branch=backend.default_branch_name, commit_id=commit_ids['change'])
705 branch=backend.default_branch_name, commit_id=commit_ids['change'])
700 pull_request.target_repo = target
706 pull_request.target_repo = target
701 # TODO: johbo: Target ref should be branch based, since tip can jump
707 # TODO: johbo: Target ref should be branch based, since tip can jump
702 # from branch to branch
708 # from branch to branch
703 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
709 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
704 branch=backend.default_branch_name,
710 branch=backend.default_branch_name,
705 commit_id=commit_ids['ancestor'])
711 commit_id=commit_ids['ancestor'])
706 pull_request.revisions = [commit_ids['change']]
712 pull_request.revisions = [commit_ids['change']]
707 pull_request.title = u"Test"
713 pull_request.title = u"Test"
708 pull_request.description = u"Description"
714 pull_request.description = u"Description"
709 pull_request.author = UserModel().get_by_username(
715 pull_request.author = UserModel().get_by_username(
710 TEST_USER_ADMIN_LOGIN)
716 TEST_USER_ADMIN_LOGIN)
711 Session().add(pull_request)
717 Session().add(pull_request)
712 Session().commit()
718 Session().commit()
713 pull_request_id = pull_request.pull_request_id
719 pull_request_id = pull_request.pull_request_id
714
720
715 # target has ancestor - ancestor-new
721 # target has ancestor - ancestor-new
716 # source has ancestor - ancestor-new - change-rebased
722 # source has ancestor - ancestor-new - change-rebased
717 backend.pull_heads(target, heads=['ancestor-new'])
723 backend.pull_heads(target, heads=['ancestor-new'])
718 backend.pull_heads(source, heads=['change-rebased'])
724 backend.pull_heads(source, heads=['change-rebased'])
719
725
720 # update PR
726 # update PR
721 self.app.post(
727 self.app.post(
722 route_path('pullrequest_update',
728 route_path('pullrequest_update',
723 repo_name=target.repo_name,
729 repo_name=target.repo_name,
724 pull_request_id=pull_request_id),
730 pull_request_id=pull_request_id),
725 params={'update_commits': 'true',
731 params={'update_commits': 'true',
726 'csrf_token': csrf_token},
732 'csrf_token': csrf_token},
727 status=200)
733 status=200)
728
734
729 # check that the pull request now tracks only the rebased revision
735 # check that the pull request now tracks only the rebased revision
730 pull_request = PullRequest.get(pull_request_id)
736 pull_request = PullRequest.get(pull_request_id)
731 assert pull_request.revisions == [commit_ids['change-rebased']]
737 assert pull_request.revisions == [commit_ids['change-rebased']]
732 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
738 assert pull_request.target_ref == 'branch:{branch}:{commit_id}'.format(
733 branch=backend.default_branch_name,
739 branch=backend.default_branch_name,
734 commit_id=commit_ids['ancestor-new'])
740 commit_id=commit_ids['ancestor-new'])
735
741
736 # TODO: johbo: This should be a test on its own
742 # TODO: johbo: This should be a test on its own
737 response = self.app.get(route_path(
743 response = self.app.get(route_path(
738 'pullrequest_new',
744 'pullrequest_new',
739 repo_name=target.repo_name))
745 repo_name=target.repo_name))
740 assert response.status_int == 200
746 assert response.status_int == 200
741 assert 'Pull request updated to' in response.body
747 assert 'Pull request updated to' in response.body
742 assert 'with 1 added, 1 removed commits.' in response.body
748 assert 'with 1 added, 1 removed commits.' in response.body
743
749
744 def test_update_of_ancestor_reference(self, backend, csrf_token):
750 def test_update_of_ancestor_reference(self, backend, csrf_token):
745 commits = [
751 commits = [
746 {'message': 'ancestor'},
752 {'message': 'ancestor'},
747 {'message': 'change'},
753 {'message': 'change'},
748 {'message': 'change-2'},
754 {'message': 'change-2'},
749 {'message': 'ancestor-new', 'parents': ['ancestor']},
755 {'message': 'ancestor-new', 'parents': ['ancestor']},
750 {'message': 'change-rebased'},
756 {'message': 'change-rebased'},
751 ]
757 ]
752 commit_ids = backend.create_master_repo(commits)
758 commit_ids = backend.create_master_repo(commits)
753 target = backend.create_repo(heads=['ancestor'])
759 target = backend.create_repo(heads=['ancestor'])
754 source = backend.create_repo(heads=['change'])
760 source = backend.create_repo(heads=['change'])
755
761
756 # create a PR from the 'change' commit in source to the 'ancestor' commit in target
762 # create a PR from the 'change' commit in source to the 'ancestor' commit in target
757 pull_request = PullRequest()
763 pull_request = PullRequest()
758 pull_request.source_repo = source
764 pull_request.source_repo = source
759 # TODO: johbo: Make sure that we write the source ref this way!
765 # TODO: johbo: Make sure that we write the source ref this way!
760 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
766 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
761 branch=backend.default_branch_name,
767 branch=backend.default_branch_name,
762 commit_id=commit_ids['change'])
768 commit_id=commit_ids['change'])
763 pull_request.target_repo = target
769 pull_request.target_repo = target
764 # TODO: johbo: Target ref should be branch based, since tip can jump
770 # TODO: johbo: Target ref should be branch based, since tip can jump
765 # from branch to branch
771 # from branch to branch
766 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
772 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
767 branch=backend.default_branch_name,
773 branch=backend.default_branch_name,
768 commit_id=commit_ids['ancestor'])
774 commit_id=commit_ids['ancestor'])
769 pull_request.revisions = [commit_ids['change']]
775 pull_request.revisions = [commit_ids['change']]
770 pull_request.title = u"Test"
776 pull_request.title = u"Test"
771 pull_request.description = u"Description"
777 pull_request.description = u"Description"
772 pull_request.author = UserModel().get_by_username(
778 pull_request.author = UserModel().get_by_username(
773 TEST_USER_ADMIN_LOGIN)
779 TEST_USER_ADMIN_LOGIN)
774 Session().add(pull_request)
780 Session().add(pull_request)
775 Session().commit()
781 Session().commit()
776 pull_request_id = pull_request.pull_request_id
782 pull_request_id = pull_request.pull_request_id
777
783
778 # target has ancestor - ancestor-new
784 # target has ancestor - ancestor-new
779 # source has ancestor - ancestor-new - change-rebased
785 # source has ancestor - ancestor-new - change-rebased
780 backend.pull_heads(target, heads=['ancestor-new'])
786 backend.pull_heads(target, heads=['ancestor-new'])
781 backend.pull_heads(source, heads=['change-rebased'])
787 backend.pull_heads(source, heads=['change-rebased'])
782
788
783 # update PR
789 # update PR
784 self.app.post(
790 self.app.post(
785 route_path('pullrequest_update',
791 route_path('pullrequest_update',
786 repo_name=target.repo_name,
792 repo_name=target.repo_name,
787 pull_request_id=pull_request_id),
793 pull_request_id=pull_request_id),
788 params={'update_commits': 'true',
794 params={'update_commits': 'true',
789 'csrf_token': csrf_token},
795 'csrf_token': csrf_token},
790 status=200)
796 status=200)
791
797
792 # Expect the target reference to be updated correctly
798 # Expect the target reference to be updated correctly
793 pull_request = PullRequest.get(pull_request_id)
799 pull_request = PullRequest.get(pull_request_id)
794 assert pull_request.revisions == [commit_ids['change-rebased']]
800 assert pull_request.revisions == [commit_ids['change-rebased']]
795 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
801 expected_target_ref = 'branch:{branch}:{commit_id}'.format(
796 branch=backend.default_branch_name,
802 branch=backend.default_branch_name,
797 commit_id=commit_ids['ancestor-new'])
803 commit_id=commit_ids['ancestor-new'])
798 assert pull_request.target_ref == expected_target_ref
804 assert pull_request.target_ref == expected_target_ref
799
805
800 def test_remove_pull_request_branch(self, backend_git, csrf_token):
806 def test_remove_pull_request_branch(self, backend_git, csrf_token):
801 branch_name = 'development'
807 branch_name = 'development'
802 commits = [
808 commits = [
803 {'message': 'initial-commit'},
809 {'message': 'initial-commit'},
804 {'message': 'old-feature'},
810 {'message': 'old-feature'},
805 {'message': 'new-feature', 'branch': branch_name},
811 {'message': 'new-feature', 'branch': branch_name},
806 ]
812 ]
807 repo = backend_git.create_repo(commits)
813 repo = backend_git.create_repo(commits)
808 commit_ids = backend_git.commit_ids
814 commit_ids = backend_git.commit_ids
809
815
810 pull_request = PullRequest()
816 pull_request = PullRequest()
811 pull_request.source_repo = repo
817 pull_request.source_repo = repo
812 pull_request.target_repo = repo
818 pull_request.target_repo = repo
813 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
819 pull_request.source_ref = 'branch:{branch}:{commit_id}'.format(
814 branch=branch_name, commit_id=commit_ids['new-feature'])
820 branch=branch_name, commit_id=commit_ids['new-feature'])
815 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
821 pull_request.target_ref = 'branch:{branch}:{commit_id}'.format(
816 branch=backend_git.default_branch_name,
822 branch=backend_git.default_branch_name,
817 commit_id=commit_ids['old-feature'])
823 commit_id=commit_ids['old-feature'])
818 pull_request.revisions = [commit_ids['new-feature']]
824 pull_request.revisions = [commit_ids['new-feature']]
819 pull_request.title = u"Test"
825 pull_request.title = u"Test"
820 pull_request.description = u"Description"
826 pull_request.description = u"Description"
821 pull_request.author = UserModel().get_by_username(
827 pull_request.author = UserModel().get_by_username(
822 TEST_USER_ADMIN_LOGIN)
828 TEST_USER_ADMIN_LOGIN)
823 Session().add(pull_request)
829 Session().add(pull_request)
824 Session().commit()
830 Session().commit()
825
831
826 vcs = repo.scm_instance()
832 vcs = repo.scm_instance()
827 vcs.remove_ref('refs/heads/{}'.format(branch_name))
833 vcs.remove_ref('refs/heads/{}'.format(branch_name))
828
834
829 response = self.app.get(route_path(
835 response = self.app.get(route_path(
830 'pullrequest_show',
836 'pullrequest_show',
831 repo_name=repo.repo_name,
837 repo_name=repo.repo_name,
832 pull_request_id=pull_request.pull_request_id))
838 pull_request_id=pull_request.pull_request_id))
833
839
834 assert response.status_int == 200
840 assert response.status_int == 200
835 assert_response = AssertResponse(response)
841 assert_response = AssertResponse(response)
836 assert_response.element_contains(
842 assert_response.element_contains(
837 '#changeset_compare_view_content .alert strong',
843 '#changeset_compare_view_content .alert strong',
838 'Missing commits')
844 'Missing commits')
839 assert_response.element_contains(
845 assert_response.element_contains(
840 '#changeset_compare_view_content .alert',
846 '#changeset_compare_view_content .alert',
841 'This pull request cannot be displayed, because one or more'
847 'This pull request cannot be displayed, because one or more'
842 ' commits no longer exist in the source repository.')
848 ' commits no longer exist in the source repository.')
843
849
844 def test_strip_commits_from_pull_request(
850 def test_strip_commits_from_pull_request(
845 self, backend, pr_util, csrf_token):
851 self, backend, pr_util, csrf_token):
846 commits = [
852 commits = [
847 {'message': 'initial-commit'},
853 {'message': 'initial-commit'},
848 {'message': 'old-feature'},
854 {'message': 'old-feature'},
849 {'message': 'new-feature', 'parents': ['initial-commit']},
855 {'message': 'new-feature', 'parents': ['initial-commit']},
850 ]
856 ]
851 pull_request = pr_util.create_pull_request(
857 pull_request = pr_util.create_pull_request(
852 commits, target_head='initial-commit', source_head='new-feature',
858 commits, target_head='initial-commit', source_head='new-feature',
853 revisions=['new-feature'])
859 revisions=['new-feature'])
854
860
855 vcs = pr_util.source_repository.scm_instance()
861 vcs = pr_util.source_repository.scm_instance()
856 if backend.alias == 'git':
862 if backend.alias == 'git':
857 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
863 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
858 else:
864 else:
859 vcs.strip(pr_util.commit_ids['new-feature'])
865 vcs.strip(pr_util.commit_ids['new-feature'])
860
866
861 response = self.app.get(route_path(
867 response = self.app.get(route_path(
862 'pullrequest_show',
868 'pullrequest_show',
863 repo_name=pr_util.target_repository.repo_name,
869 repo_name=pr_util.target_repository.repo_name,
864 pull_request_id=pull_request.pull_request_id))
870 pull_request_id=pull_request.pull_request_id))
865
871
866 assert response.status_int == 200
872 assert response.status_int == 200
867 assert_response = AssertResponse(response)
873 assert_response = AssertResponse(response)
868 assert_response.element_contains(
874 assert_response.element_contains(
869 '#changeset_compare_view_content .alert strong',
875 '#changeset_compare_view_content .alert strong',
870 'Missing commits')
876 'Missing commits')
871 assert_response.element_contains(
877 assert_response.element_contains(
872 '#changeset_compare_view_content .alert',
878 '#changeset_compare_view_content .alert',
873 'This pull request cannot be displayed, because one or more'
879 'This pull request cannot be displayed, because one or more'
874 ' commits no longer exist in the source repository.')
880 ' commits no longer exist in the source repository.')
875 assert_response.element_contains(
881 assert_response.element_contains(
876 '#update_commits',
882 '#update_commits',
877 'Update commits')
883 'Update commits')
878
884
879 def test_strip_commits_and_update(
885 def test_strip_commits_and_update(
880 self, backend, pr_util, csrf_token):
886 self, backend, pr_util, csrf_token):
881 commits = [
887 commits = [
882 {'message': 'initial-commit'},
888 {'message': 'initial-commit'},
883 {'message': 'old-feature'},
889 {'message': 'old-feature'},
884 {'message': 'new-feature', 'parents': ['old-feature']},
890 {'message': 'new-feature', 'parents': ['old-feature']},
885 ]
891 ]
886 pull_request = pr_util.create_pull_request(
892 pull_request = pr_util.create_pull_request(
887 commits, target_head='old-feature', source_head='new-feature',
893 commits, target_head='old-feature', source_head='new-feature',
888 revisions=['new-feature'], mergeable=True)
894 revisions=['new-feature'], mergeable=True)
889
895
890 vcs = pr_util.source_repository.scm_instance()
896 vcs = pr_util.source_repository.scm_instance()
891 if backend.alias == 'git':
897 if backend.alias == 'git':
892 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
898 vcs.strip(pr_util.commit_ids['new-feature'], branch_name='master')
893 else:
899 else:
894 vcs.strip(pr_util.commit_ids['new-feature'])
900 vcs.strip(pr_util.commit_ids['new-feature'])
895
901
896 response = self.app.post(
902 response = self.app.post(
897 route_path('pullrequest_update',
903 route_path('pullrequest_update',
898 repo_name=pull_request.target_repo.repo_name,
904 repo_name=pull_request.target_repo.repo_name,
899 pull_request_id=pull_request.pull_request_id),
905 pull_request_id=pull_request.pull_request_id),
900 params={'update_commits': 'true',
906 params={'update_commits': 'true',
901 'csrf_token': csrf_token})
907 'csrf_token': csrf_token})
902
908
903 assert response.status_int == 200
909 assert response.status_int == 200
904 assert response.body == 'true'
910 assert response.body == 'true'
905
911
906 # Make sure that after update, it won't raise 500 errors
912 # Make sure that after update, it won't raise 500 errors
907 response = self.app.get(route_path(
913 response = self.app.get(route_path(
908 'pullrequest_show',
914 'pullrequest_show',
909 repo_name=pr_util.target_repository.repo_name,
915 repo_name=pr_util.target_repository.repo_name,
910 pull_request_id=pull_request.pull_request_id))
916 pull_request_id=pull_request.pull_request_id))
911
917
912 assert response.status_int == 200
918 assert response.status_int == 200
913 assert_response = AssertResponse(response)
919 assert_response = AssertResponse(response)
914 assert_response.element_contains(
920 assert_response.element_contains(
915 '#changeset_compare_view_content .alert strong',
921 '#changeset_compare_view_content .alert strong',
916 'Missing commits')
922 'Missing commits')
917
923
918 def test_branch_is_a_link(self, pr_util):
924 def test_branch_is_a_link(self, pr_util):
919 pull_request = pr_util.create_pull_request()
925 pull_request = pr_util.create_pull_request()
920 pull_request.source_ref = 'branch:origin:1234567890abcdef'
926 pull_request.source_ref = 'branch:origin:1234567890abcdef'
921 pull_request.target_ref = 'branch:target:abcdef1234567890'
927 pull_request.target_ref = 'branch:target:abcdef1234567890'
922 Session().add(pull_request)
928 Session().add(pull_request)
923 Session().commit()
929 Session().commit()
924
930
925 response = self.app.get(route_path(
931 response = self.app.get(route_path(
926 'pullrequest_show',
932 'pullrequest_show',
927 repo_name=pull_request.target_repo.scm_instance().name,
933 repo_name=pull_request.target_repo.scm_instance().name,
928 pull_request_id=pull_request.pull_request_id))
934 pull_request_id=pull_request.pull_request_id))
929 assert response.status_int == 200
935 assert response.status_int == 200
930 assert_response = AssertResponse(response)
936 assert_response = AssertResponse(response)
931
937
932 origin = assert_response.get_element('.pr-origininfo .tag')
938 origin = assert_response.get_element('.pr-origininfo .tag')
933 origin_children = origin.getchildren()
939 origin_children = origin.getchildren()
934 assert len(origin_children) == 1
940 assert len(origin_children) == 1
935 target = assert_response.get_element('.pr-targetinfo .tag')
941 target = assert_response.get_element('.pr-targetinfo .tag')
936 target_children = target.getchildren()
942 target_children = target.getchildren()
937 assert len(target_children) == 1
943 assert len(target_children) == 1
938
944
939 expected_origin_link = route_path(
945 expected_origin_link = route_path(
940 'repo_changelog',
946 'repo_changelog',
941 repo_name=pull_request.source_repo.scm_instance().name,
947 repo_name=pull_request.source_repo.scm_instance().name,
942 params=dict(branch='origin'))
948 params=dict(branch='origin'))
943 expected_target_link = route_path(
949 expected_target_link = route_path(
944 'repo_changelog',
950 'repo_changelog',
945 repo_name=pull_request.target_repo.scm_instance().name,
951 repo_name=pull_request.target_repo.scm_instance().name,
946 params=dict(branch='target'))
952 params=dict(branch='target'))
947 assert origin_children[0].attrib['href'] == expected_origin_link
953 assert origin_children[0].attrib['href'] == expected_origin_link
948 assert origin_children[0].text == 'branch: origin'
954 assert origin_children[0].text == 'branch: origin'
949 assert target_children[0].attrib['href'] == expected_target_link
955 assert target_children[0].attrib['href'] == expected_target_link
950 assert target_children[0].text == 'branch: target'
956 assert target_children[0].text == 'branch: target'
951
957
952 def test_bookmark_is_not_a_link(self, pr_util):
958 def test_bookmark_is_not_a_link(self, pr_util):
953 pull_request = pr_util.create_pull_request()
959 pull_request = pr_util.create_pull_request()
954 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
960 pull_request.source_ref = 'bookmark:origin:1234567890abcdef'
955 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
961 pull_request.target_ref = 'bookmark:target:abcdef1234567890'
956 Session().add(pull_request)
962 Session().add(pull_request)
957 Session().commit()
963 Session().commit()
958
964
959 response = self.app.get(route_path(
965 response = self.app.get(route_path(
960 'pullrequest_show',
966 'pullrequest_show',
961 repo_name=pull_request.target_repo.scm_instance().name,
967 repo_name=pull_request.target_repo.scm_instance().name,
962 pull_request_id=pull_request.pull_request_id))
968 pull_request_id=pull_request.pull_request_id))
963 assert response.status_int == 200
969 assert response.status_int == 200
964 assert_response = AssertResponse(response)
970 assert_response = AssertResponse(response)
965
971
966 origin = assert_response.get_element('.pr-origininfo .tag')
972 origin = assert_response.get_element('.pr-origininfo .tag')
967 assert origin.text.strip() == 'bookmark: origin'
973 assert origin.text.strip() == 'bookmark: origin'
968 assert origin.getchildren() == []
974 assert origin.getchildren() == []
969
975
970 target = assert_response.get_element('.pr-targetinfo .tag')
976 target = assert_response.get_element('.pr-targetinfo .tag')
971 assert target.text.strip() == 'bookmark: target'
977 assert target.text.strip() == 'bookmark: target'
972 assert target.getchildren() == []
978 assert target.getchildren() == []
973
979
974 def test_tag_is_not_a_link(self, pr_util):
980 def test_tag_is_not_a_link(self, pr_util):
975 pull_request = pr_util.create_pull_request()
981 pull_request = pr_util.create_pull_request()
976 pull_request.source_ref = 'tag:origin:1234567890abcdef'
982 pull_request.source_ref = 'tag:origin:1234567890abcdef'
977 pull_request.target_ref = 'tag:target:abcdef1234567890'
983 pull_request.target_ref = 'tag:target:abcdef1234567890'
978 Session().add(pull_request)
984 Session().add(pull_request)
979 Session().commit()
985 Session().commit()
980
986
981 response = self.app.get(route_path(
987 response = self.app.get(route_path(
982 'pullrequest_show',
988 'pullrequest_show',
983 repo_name=pull_request.target_repo.scm_instance().name,
989 repo_name=pull_request.target_repo.scm_instance().name,
984 pull_request_id=pull_request.pull_request_id))
990 pull_request_id=pull_request.pull_request_id))
985 assert response.status_int == 200
991 assert response.status_int == 200
986 assert_response = AssertResponse(response)
992 assert_response = AssertResponse(response)
987
993
988 origin = assert_response.get_element('.pr-origininfo .tag')
994 origin = assert_response.get_element('.pr-origininfo .tag')
989 assert origin.text.strip() == 'tag: origin'
995 assert origin.text.strip() == 'tag: origin'
990 assert origin.getchildren() == []
996 assert origin.getchildren() == []
991
997
992 target = assert_response.get_element('.pr-targetinfo .tag')
998 target = assert_response.get_element('.pr-targetinfo .tag')
993 assert target.text.strip() == 'tag: target'
999 assert target.text.strip() == 'tag: target'
994 assert target.getchildren() == []
1000 assert target.getchildren() == []
995
1001
996 @pytest.mark.parametrize('mergeable', [True, False])
1002 @pytest.mark.parametrize('mergeable', [True, False])
997 def test_shadow_repository_link(
1003 def test_shadow_repository_link(
998 self, mergeable, pr_util, http_host_only_stub):
1004 self, mergeable, pr_util, http_host_only_stub):
999 """
1005 """
1000 Check that the pull request summary page displays a link to the shadow
1006 Check that the pull request summary page displays a link to the shadow
1001 repository if the pull request is mergeable. If it is not mergeable
1007 repository if the pull request is mergeable. If it is not mergeable
1002 the link should not be displayed.
1008 the link should not be displayed.
1003 """
1009 """
1004 pull_request = pr_util.create_pull_request(
1010 pull_request = pr_util.create_pull_request(
1005 mergeable=mergeable, enable_notifications=False)
1011 mergeable=mergeable, enable_notifications=False)
1006 target_repo = pull_request.target_repo.scm_instance()
1012 target_repo = pull_request.target_repo.scm_instance()
1007 pr_id = pull_request.pull_request_id
1013 pr_id = pull_request.pull_request_id
1008 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1014 shadow_url = '{host}/{repo}/pull-request/{pr_id}/repository'.format(
1009 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1015 host=http_host_only_stub, repo=target_repo.name, pr_id=pr_id)
1010
1016
1011 response = self.app.get(route_path(
1017 response = self.app.get(route_path(
1012 'pullrequest_show',
1018 'pullrequest_show',
1013 repo_name=target_repo.name,
1019 repo_name=target_repo.name,
1014 pull_request_id=pr_id))
1020 pull_request_id=pr_id))
1015
1021
1016 assertr = AssertResponse(response)
1022 assertr = AssertResponse(response)
1017 if mergeable:
1023 if mergeable:
1018 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1024 assertr.element_value_contains('input.pr-mergeinfo', shadow_url)
1019 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1025 assertr.element_value_contains('input.pr-mergeinfo ', 'pr-merge')
1020 else:
1026 else:
1021 assertr.no_element_exists('.pr-mergeinfo')
1027 assertr.no_element_exists('.pr-mergeinfo')
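# Illustration with hypothetical values: for host 'code.example.com', target
# repo 'project/backend' and pull request id 7, the shadow repository URL
# checked above reads 'code.example.com/project/backend/pull-request/7/repository'.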
1022
1028
1023
1029
1024 @pytest.mark.usefixtures('app')
1030 @pytest.mark.usefixtures('app')
1025 @pytest.mark.backends("git", "hg")
1031 @pytest.mark.backends("git", "hg")
1026 class TestPullrequestsControllerDelete(object):
1032 class TestPullrequestsControllerDelete(object):
1027 def test_pull_request_delete_button_permissions_admin(
1033 def test_pull_request_delete_button_permissions_admin(
1028 self, autologin_user, user_admin, pr_util):
1034 self, autologin_user, user_admin, pr_util):
1029 pull_request = pr_util.create_pull_request(
1035 pull_request = pr_util.create_pull_request(
1030 author=user_admin.username, enable_notifications=False)
1036 author=user_admin.username, enable_notifications=False)
1031
1037
1032 response = self.app.get(route_path(
1038 response = self.app.get(route_path(
1033 'pullrequest_show',
1039 'pullrequest_show',
1034 repo_name=pull_request.target_repo.scm_instance().name,
1040 repo_name=pull_request.target_repo.scm_instance().name,
1035 pull_request_id=pull_request.pull_request_id))
1041 pull_request_id=pull_request.pull_request_id))
1036
1042
1037 response.mustcontain('id="delete_pullrequest"')
1043 response.mustcontain('id="delete_pullrequest"')
1038 response.mustcontain('Confirm to delete this pull request')
1044 response.mustcontain('Confirm to delete this pull request')
1039
1045
1040 def test_pull_request_delete_button_permissions_owner(
1046 def test_pull_request_delete_button_permissions_owner(
1041 self, autologin_regular_user, user_regular, pr_util):
1047 self, autologin_regular_user, user_regular, pr_util):
1042 pull_request = pr_util.create_pull_request(
1048 pull_request = pr_util.create_pull_request(
1043 author=user_regular.username, enable_notifications=False)
1049 author=user_regular.username, enable_notifications=False)
1044
1050
1045 response = self.app.get(route_path(
1051 response = self.app.get(route_path(
1046 'pullrequest_show',
1052 'pullrequest_show',
1047 repo_name=pull_request.target_repo.scm_instance().name,
1053 repo_name=pull_request.target_repo.scm_instance().name,
1048 pull_request_id=pull_request.pull_request_id))
1054 pull_request_id=pull_request.pull_request_id))
1049
1055
1050 response.mustcontain('id="delete_pullrequest"')
1056 response.mustcontain('id="delete_pullrequest"')
1051 response.mustcontain('Confirm to delete this pull request')
1057 response.mustcontain('Confirm to delete this pull request')
1052
1058
1053 def test_pull_request_delete_button_permissions_forbidden(
1059 def test_pull_request_delete_button_permissions_forbidden(
1054 self, autologin_regular_user, user_regular, user_admin, pr_util):
1060 self, autologin_regular_user, user_regular, user_admin, pr_util):
1055 pull_request = pr_util.create_pull_request(
1061 pull_request = pr_util.create_pull_request(
1056 author=user_admin.username, enable_notifications=False)
1062 author=user_admin.username, enable_notifications=False)
1057
1063
1058 response = self.app.get(route_path(
1064 response = self.app.get(route_path(
1059 'pullrequest_show',
1065 'pullrequest_show',
1060 repo_name=pull_request.target_repo.scm_instance().name,
1066 repo_name=pull_request.target_repo.scm_instance().name,
1061 pull_request_id=pull_request.pull_request_id))
1067 pull_request_id=pull_request.pull_request_id))
1062 response.mustcontain(no=['id="delete_pullrequest"'])
1068 response.mustcontain(no=['id="delete_pullrequest"'])
1063 response.mustcontain(no=['Confirm to delete this pull request'])
1069 response.mustcontain(no=['Confirm to delete this pull request'])
1064
1070
1065 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1071 def test_pull_request_delete_button_permissions_can_update_cannot_delete(
1066 self, autologin_regular_user, user_regular, user_admin, pr_util,
1072 self, autologin_regular_user, user_regular, user_admin, pr_util,
1067 user_util):
1073 user_util):
1068
1074
1069 pull_request = pr_util.create_pull_request(
1075 pull_request = pr_util.create_pull_request(
1070 author=user_admin.username, enable_notifications=False)
1076 author=user_admin.username, enable_notifications=False)
1071
1077
1072 user_util.grant_user_permission_to_repo(
1078 user_util.grant_user_permission_to_repo(
1073 pull_request.target_repo, user_regular,
1079 pull_request.target_repo, user_regular,
1074 'repository.write')
1080 'repository.write')
1075
1081
1076 response = self.app.get(route_path(
1082 response = self.app.get(route_path(
1077 'pullrequest_show',
1083 'pullrequest_show',
1078 repo_name=pull_request.target_repo.scm_instance().name,
1084 repo_name=pull_request.target_repo.scm_instance().name,
1079 pull_request_id=pull_request.pull_request_id))
1085 pull_request_id=pull_request.pull_request_id))
1080
1086
1081 response.mustcontain('id="open_edit_pullrequest"')
1087 response.mustcontain('id="open_edit_pullrequest"')
1082 response.mustcontain('id="delete_pullrequest"')
1088 response.mustcontain('id="delete_pullrequest"')
1083 response.mustcontain(no=['Confirm to delete this pull request'])
1089 response.mustcontain(no=['Confirm to delete this pull request'])
1084
1090
1085 def test_delete_comment_returns_404_if_comment_does_not_exist(
1091 def test_delete_comment_returns_404_if_comment_does_not_exist(
1086 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1092 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1087
1093
1088 pull_request = pr_util.create_pull_request(
1094 pull_request = pr_util.create_pull_request(
1089 author=user_admin.username, enable_notifications=False)
1095 author=user_admin.username, enable_notifications=False)
1090
1096
1091 self.app.post(
1097 self.app.post(
1092 route_path(
1098 route_path(
1093 'pullrequest_comment_delete',
1099 'pullrequest_comment_delete',
1094 repo_name=pull_request.target_repo.scm_instance().name,
1100 repo_name=pull_request.target_repo.scm_instance().name,
1095 pull_request_id=pull_request.pull_request_id,
1101 pull_request_id=pull_request.pull_request_id,
1096 comment_id=1024404),
1102 comment_id=1024404),
1097 extra_environ=xhr_header,
1103 extra_environ=xhr_header,
1098 params={'csrf_token': csrf_token},
1104 params={'csrf_token': csrf_token},
1099 status=404
1105 status=404
1100 )
1106 )
1101
1107
1102 def test_delete_comment(
1108 def test_delete_comment(
1103 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1109 self, autologin_user, pr_util, user_admin, csrf_token, xhr_header):
1104
1110
1105 pull_request = pr_util.create_pull_request(
1111 pull_request = pr_util.create_pull_request(
1106 author=user_admin.username, enable_notifications=False)
1112 author=user_admin.username, enable_notifications=False)
1107 comment = pr_util.create_comment()
1113 comment = pr_util.create_comment()
1108 comment_id = comment.comment_id
1114 comment_id = comment.comment_id
1109
1115
1110 response = self.app.post(
1116 response = self.app.post(
1111 route_path(
1117 route_path(
1112 'pullrequest_comment_delete',
1118 'pullrequest_comment_delete',
1113 repo_name=pull_request.target_repo.scm_instance().name,
1119 repo_name=pull_request.target_repo.scm_instance().name,
1114 pull_request_id=pull_request.pull_request_id,
1120 pull_request_id=pull_request.pull_request_id,
1115 comment_id=comment_id),
1121 comment_id=comment_id),
1116 extra_environ=xhr_header,
1122 extra_environ=xhr_header,
1117 params={'csrf_token': csrf_token},
1123 params={'csrf_token': csrf_token},
1118 status=200
1124 status=200
1119 )
1125 )
1120 assert response.body == 'true'
1126 assert response.body == 'true'
1121
1127
1122
1128
1123 def assert_pull_request_status(pull_request, expected_status):
1129 def assert_pull_request_status(pull_request, expected_status):
1124 status = ChangesetStatusModel().calculated_review_status(
1130 status = ChangesetStatusModel().calculated_review_status(
1125 pull_request=pull_request)
1131 pull_request=pull_request)
1126 assert status == expected_status
1132 assert status == expected_status
1127
1133
1128
1134
1129 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1135 @pytest.mark.parametrize('route', ['pullrequest_new', 'pullrequest_create'])
1130 @pytest.mark.usefixtures("autologin_user")
1136 @pytest.mark.usefixtures("autologin_user")
1131 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1137 def test_forbidden_to_repo_summary_for_svn_repositories(backend_svn, app, route):
1132 response = app.get(
1138 response = app.get(
1133 route_path(route, repo_name=backend_svn.repo_name), status=404)
1139 route_path(route, repo_name=backend_svn.repo_name), status=404)
1134
1140
@@ -1,76 +1,79 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2017 RhodeCode GmbH
3 # Copyright (C) 2016-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 from rhodecode.lib import helpers as h
21 from rhodecode.lib import helpers as h
22 from rhodecode.lib.utils2 import safe_int
22 from rhodecode.lib.utils2 import safe_int
23
23
24
24
25 def reviewer_as_json(user, reasons=None, mandatory=False):
25 def reviewer_as_json(user, reasons=None, mandatory=False, rules=None, user_group=None):
26 """
26 """
27 Return the JSON structure describing a reviewer for the frontend
27 Return the JSON structure describing a reviewer for the frontend
28
28
29 :param user: the reviewer
29 :param user: the reviewer
30 :param reasons: list of strings of why they are reviewers
30 :param reasons: list of strings of why they are reviewers
31 :param mandatory: bool, to set user as mandatory
31 :param mandatory: bool, to set user as mandatory
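:param rules: optional list of reviewer rules attached to this reviewer
:param user_group: optional user group information for this reviewer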
32 """
32 """
33
33
34 return {
34 return {
35 'user_id': user.user_id,
35 'user_id': user.user_id,
36 'reasons': reasons or [],
36 'reasons': reasons or [],
37 'rules': rules or [],
37 'mandatory': mandatory,
38 'mandatory': mandatory,
39 'user_group': user_group,
38 'username': user.username,
40 'username': user.username,
39 'first_name': user.first_name,
41 'first_name': user.first_name,
40 'last_name': user.last_name,
42 'last_name': user.last_name,
43 'user_link': h.link_to_user(user),
41 'gravatar_link': h.gravatar_url(user.email, 14),
44 'gravatar_link': h.gravatar_url(user.email, 14),
42 }
45 }
43
46
44
47
45 def get_default_reviewers_data(
48 def get_default_reviewers_data(
46 current_user, source_repo, source_commit, target_repo, target_commit):
49 current_user, source_repo, source_commit, target_repo, target_commit):
47
50
48 """ Return json for default reviewers of a repository """
51 """ Return json for default reviewers of a repository """
49
52
50 reasons = ['Default reviewer', 'Repository owner']
53 reasons = ['Default reviewer', 'Repository owner']
51 default = reviewer_as_json(
54 default = reviewer_as_json(
52 user=current_user, reasons=reasons, mandatory=False)
55 user=current_user, reasons=reasons, mandatory=False)
53
56
54 return {
57 return {
55 'api_ver': 'v1', # define version for later possible schema upgrade
58 'api_ver': 'v1', # define version for later possible schema upgrade
56 'reviewers': [default],
59 'reviewers': [default],
57 'rules': {},
60 'rules': {},
58 'rules_data': {},
61 'rules_data': {},
59 }
62 }
60
63
61
64
62 def validate_default_reviewers(review_members, reviewer_rules):
65 def validate_default_reviewers(review_members, reviewer_rules):
63 """
66 """
64 Function to validate submitted reviewers against the saved rules
67 Function to validate submitted reviewers against the saved rules
65
68
66 """
69 """
67 reviewers = []
70 reviewers = []
68 reviewer_by_id = {}
71 reviewer_by_id = {}
69 for r in review_members:
72 for r in review_members:
70 reviewer_user_id = safe_int(r['user_id'])
73 reviewer_user_id = safe_int(r['user_id'])
71 entry = (reviewer_user_id, r['reasons'], r['mandatory'])
74 entry = (reviewer_user_id, r['reasons'], r['mandatory'], r['rules'])
72
75
73 reviewer_by_id[reviewer_user_id] = entry
76 reviewer_by_id[reviewer_user_id] = entry
74 reviewers.append(entry)
77 reviewers.append(entry)
75
78
76 return reviewers
79 return reviewers
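A minimal sketch of the data these helpers now produce, with illustrative values only (the keys follow reviewer_as_json above; ids, names and links are made up):

example_reviewer = {
    'user_id': 2,
    'reasons': ['Default reviewer', 'Repository owner'],
    'rules': [],             # new field introduced by this change
    'mandatory': False,
    'user_group': None,      # new field introduced by this change
    'username': 'admin',
    'first_name': '',
    'last_name': '',
    'user_link': '<a href="...">admin</a>',
    'gravatar_link': 'https://gravatar.example/...',
}

# validate_default_reviewers() repackages each submitted member into a
# 4-tuple (user_id, reasons, mandatory, rules); for the entry above that is:
# (2, ['Default reviewer', 'Repository owner'], False, [])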
@@ -1,2072 +1,2077 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Helper functions
22 Helper functions
23
23
24 Consists of functions typically used within templates, but also
24 Consists of functions typically used within templates, but also
25 available to Controllers. This module is exposed to both as 'h'.
25 available to Controllers. This module is exposed to both as 'h'.
26 """
26 """
27
27
28 import random
28 import random
29 import hashlib
29 import hashlib
30 import StringIO
30 import StringIO
31 import urllib
31 import urllib
32 import math
32 import math
33 import logging
33 import logging
34 import re
34 import re
35 import urlparse
35 import urlparse
36 import time
36 import time
37 import string
37 import string
38 import hashlib
38 import hashlib
39 from collections import OrderedDict
39 from collections import OrderedDict
40
40
41 import pygments
41 import pygments
42 import itertools
42 import itertools
43 import fnmatch
43 import fnmatch
44
44
45 from datetime import datetime
45 from datetime import datetime
46 from functools import partial
46 from functools import partial
47 from pygments.formatters.html import HtmlFormatter
47 from pygments.formatters.html import HtmlFormatter
48 from pygments import highlight as code_highlight
48 from pygments import highlight as code_highlight
49 from pygments.lexers import (
49 from pygments.lexers import (
50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
50 get_lexer_by_name, get_lexer_for_filename, get_lexer_for_mimetype)
51
51
52 from pyramid.threadlocal import get_current_request
52 from pyramid.threadlocal import get_current_request
53
53
54 from webhelpers.html import literal, HTML, escape
54 from webhelpers.html import literal, HTML, escape
55 from webhelpers.html.tools import *
55 from webhelpers.html.tools import *
56 from webhelpers.html.builder import make_tag
56 from webhelpers.html.builder import make_tag
57 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
57 from webhelpers.html.tags import auto_discovery_link, checkbox, css_classes, \
58 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
58 end_form, file, form as wh_form, hidden, image, javascript_link, link_to, \
59 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
59 link_to_if, link_to_unless, ol, required_legend, select, stylesheet_link, \
60 submit, text, password, textarea, title, ul, xml_declaration, radio
60 submit, text, password, textarea, title, ul, xml_declaration, radio
61 from webhelpers.html.tools import auto_link, button_to, highlight, \
61 from webhelpers.html.tools import auto_link, button_to, highlight, \
62 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
62 js_obfuscate, mail_to, strip_links, strip_tags, tag_re
63 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
63 from webhelpers.text import chop_at, collapse, convert_accented_entities, \
64 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
64 convert_misc_entities, lchop, plural, rchop, remove_formatting, \
65 replace_whitespace, urlify, truncate, wrap_paragraphs
65 replace_whitespace, urlify, truncate, wrap_paragraphs
66 from webhelpers.date import time_ago_in_words
66 from webhelpers.date import time_ago_in_words
67 from webhelpers.paginate import Page as _Page
67 from webhelpers.paginate import Page as _Page
68 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
68 from webhelpers.html.tags import _set_input_attrs, _set_id_attr, \
69 convert_boolean_attrs, NotGiven, _make_safe_id_component
69 convert_boolean_attrs, NotGiven, _make_safe_id_component
70 from webhelpers2.number import format_byte_size
70 from webhelpers2.number import format_byte_size
71
71
72 from rhodecode.lib.action_parser import action_parser
72 from rhodecode.lib.action_parser import action_parser
73 from rhodecode.lib.ext_json import json
73 from rhodecode.lib.ext_json import json
74 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
74 from rhodecode.lib.utils import repo_name_slug, get_custom_lexer
75 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
75 from rhodecode.lib.utils2 import str2bool, safe_unicode, safe_str, \
76 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
76 get_commit_safe, datetime_to_time, time_to_datetime, time_to_utcdatetime, \
77 AttributeDict, safe_int, md5, md5_safe
77 AttributeDict, safe_int, md5, md5_safe
78 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
78 from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links
79 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
79 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
80 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
80 from rhodecode.lib.vcs.backends.base import BaseChangeset, EmptyCommit
81 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
81 from rhodecode.config.conf import DATE_FORMAT, DATETIME_FORMAT
82 from rhodecode.model.changeset_status import ChangesetStatusModel
82 from rhodecode.model.changeset_status import ChangesetStatusModel
83 from rhodecode.model.db import Permission, User, Repository
83 from rhodecode.model.db import Permission, User, Repository
84 from rhodecode.model.repo_group import RepoGroupModel
84 from rhodecode.model.repo_group import RepoGroupModel
85 from rhodecode.model.settings import IssueTrackerSettingsModel
85 from rhodecode.model.settings import IssueTrackerSettingsModel
86
86
87 log = logging.getLogger(__name__)
87 log = logging.getLogger(__name__)
88
88
89
89
90 DEFAULT_USER = User.DEFAULT_USER
90 DEFAULT_USER = User.DEFAULT_USER
91 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
91 DEFAULT_USER_EMAIL = User.DEFAULT_USER_EMAIL
92
92
93
93
94 def asset(path, ver=None, **kwargs):
94 def asset(path, ver=None, **kwargs):
95 """
95 """
96 Helper to generate a static asset file path for rhodecode assets
96 Helper to generate a static asset file path for rhodecode assets
97
97
98 eg. h.asset('images/image.png', ver='3923')
98 eg. h.asset('images/image.png', ver='3923')
99
99
100 :param path: path of asset
100 :param path: path of asset
101 :param ver: optional version query param to append as ?ver=
101 :param ver: optional version query param to append as ?ver=
102 """
102 """
103 request = get_current_request()
103 request = get_current_request()
104 query = {}
104 query = {}
105 query.update(kwargs)
105 query.update(kwargs)
106 if ver:
106 if ver:
107 query = {'ver': ver}
107 query = {'ver': ver}
108 return request.static_path(
108 return request.static_path(
109 'rhodecode:public/{}'.format(path), _query=query)
109 'rhodecode:public/{}'.format(path), _query=query)
110
110
111
111
112 default_html_escape_table = {
112 default_html_escape_table = {
113 ord('&'): u'&amp;',
113 ord('&'): u'&amp;',
114 ord('<'): u'&lt;',
114 ord('<'): u'&lt;',
115 ord('>'): u'&gt;',
115 ord('>'): u'&gt;',
116 ord('"'): u'&quot;',
116 ord('"'): u'&quot;',
117 ord("'"): u'&#39;',
117 ord("'"): u'&#39;',
118 }
118 }
119
119
120
120
121 def html_escape(text, html_escape_table=default_html_escape_table):
121 def html_escape(text, html_escape_table=default_html_escape_table):
122 """Produce entities within text."""
122 """Produce entities within text."""
123 return text.translate(html_escape_table)
123 return text.translate(html_escape_table)
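# Example: with the default escape table,
# html_escape(u'Tom & "Jerry" <3') returns u'Tom &amp; &quot;Jerry&quot; &lt;3'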
124
124
125
125
126 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
126 def chop_at_smart(s, sub, inclusive=False, suffix_if_chopped=None):
127 """
127 """
128 Truncate string ``s`` at the first occurrence of ``sub``.
128 Truncate string ``s`` at the first occurrence of ``sub``.
129
129
130 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
130 If ``inclusive`` is true, truncate just after ``sub`` rather than at it.
131 """
131 """
132 suffix_if_chopped = suffix_if_chopped or ''
132 suffix_if_chopped = suffix_if_chopped or ''
133 pos = s.find(sub)
133 pos = s.find(sub)
134 if pos == -1:
134 if pos == -1:
135 return s
135 return s
136
136
137 if inclusive:
137 if inclusive:
138 pos += len(sub)
138 pos += len(sub)
139
139
140 chopped = s[:pos]
140 chopped = s[:pos]
141 left = s[pos:].strip()
141 left = s[pos:].strip()
142
142
143 if left and suffix_if_chopped:
143 if left and suffix_if_chopped:
144 chopped += suffix_if_chopped
144 chopped += suffix_if_chopped
145
145
146 return chopped
146 return chopped
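# Example: chop_at_smart('foo bar baz', 'bar', suffix_if_chopped='...')
# returns 'foo ...': the text from the first 'bar' onwards is dropped and,
# because non-empty text remained after the cut, the suffix is appended.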
147
147
148
148
149 def shorter(text, size=20):
149 def shorter(text, size=20):
150 postfix = '...'
150 postfix = '...'
151 if len(text) > size:
151 if len(text) > size:
152 return text[:size - len(postfix)] + postfix
152 return text[:size - len(postfix)] + postfix
153 return text
153 return text
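# Example: shorter('documentation', size=10) returns 'documen...' (7 characters
# of the text plus the 3-character postfix); shorter('docs') is returned unchanged.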
154
154
155
155
156 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
156 def _reset(name, value=None, id=NotGiven, type="reset", **attrs):
157 """
157 """
158 Reset button
158 Reset button
159 """
159 """
160 _set_input_attrs(attrs, type, name, value)
160 _set_input_attrs(attrs, type, name, value)
161 _set_id_attr(attrs, id, name)
161 _set_id_attr(attrs, id, name)
162 convert_boolean_attrs(attrs, ["disabled"])
162 convert_boolean_attrs(attrs, ["disabled"])
163 return HTML.input(**attrs)
163 return HTML.input(**attrs)
164
164
165 reset = _reset
165 reset = _reset
166 safeid = _make_safe_id_component
166 safeid = _make_safe_id_component
167
167
168
168
169 def branding(name, length=40):
169 def branding(name, length=40):
170 return truncate(name, length, indicator="")
170 return truncate(name, length, indicator="")
171
171
172
172
173 def FID(raw_id, path):
173 def FID(raw_id, path):
174 """
174 """
175 Creates a unique ID for a filenode, based on a hash of its path and commit;
175 Creates a unique ID for a filenode, based on a hash of its path and commit;
176 it is safe to use in URLs
176 it is safe to use in URLs
177
177
178 :param raw_id:
178 :param raw_id:
179 :param path:
179 :param path:
180 """
180 """
181
181
182 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
182 return 'c-%s-%s' % (short_id(raw_id), md5_safe(path)[:12])
183
183
184
184
185 class _GetError(object):
185 class _GetError(object):
186 """Get error from form_errors, and represent it as span wrapped error
186 """Get error from form_errors, and represent it as span wrapped error
187 message
187 message
188
188
189 :param field_name: field to fetch errors for
189 :param field_name: field to fetch errors for
190 :param form_errors: form errors dict
190 :param form_errors: form errors dict
191 """
191 """
192
192
193 def __call__(self, field_name, form_errors):
193 def __call__(self, field_name, form_errors):
194 tmpl = """<span class="error_msg">%s</span>"""
194 tmpl = """<span class="error_msg">%s</span>"""
195 if form_errors and field_name in form_errors:
195 if form_errors and field_name in form_errors:
196 return literal(tmpl % form_errors.get(field_name))
196 return literal(tmpl % form_errors.get(field_name))
197
197
198 get_error = _GetError()
198 get_error = _GetError()
199
199
200
200
201 class _ToolTip(object):
201 class _ToolTip(object):
202
202
203 def __call__(self, tooltip_title, trim_at=50):
203 def __call__(self, tooltip_title, trim_at=50):
204 """
204 """
205 Escape the given tooltip title so it can be rendered safely as
205 Escape the given tooltip title so it can be rendered safely as
206 auto-wrapped tooltip text
206 auto-wrapped tooltip text
207
207
208 :param tooltip_title:
208 :param tooltip_title:
209 """
209 """
210 tooltip_title = escape(tooltip_title)
210 tooltip_title = escape(tooltip_title)
211 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
211 tooltip_title = tooltip_title.replace('<', '&lt;').replace('>', '&gt;')
212 return tooltip_title
212 return tooltip_title
213 tooltip = _ToolTip()
213 tooltip = _ToolTip()
214
214
215
215
216 def files_breadcrumbs(repo_name, commit_id, file_path):
216 def files_breadcrumbs(repo_name, commit_id, file_path):
217 if isinstance(file_path, str):
217 if isinstance(file_path, str):
218 file_path = safe_unicode(file_path)
218 file_path = safe_unicode(file_path)
219
219
220 # TODO: johbo: Is this always a url like path, or is this operating
220 # TODO: johbo: Is this always a url like path, or is this operating
221 # system dependent?
221 # system dependent?
222 path_segments = file_path.split('/')
222 path_segments = file_path.split('/')
223
223
224 repo_name_html = escape(repo_name)
224 repo_name_html = escape(repo_name)
225 if len(path_segments) == 1 and path_segments[0] == '':
225 if len(path_segments) == 1 and path_segments[0] == '':
226 url_segments = [repo_name_html]
226 url_segments = [repo_name_html]
227 else:
227 else:
228 url_segments = [
228 url_segments = [
229 link_to(
229 link_to(
230 repo_name_html,
230 repo_name_html,
231 route_path(
231 route_path(
232 'repo_files',
232 'repo_files',
233 repo_name=repo_name,
233 repo_name=repo_name,
234 commit_id=commit_id,
234 commit_id=commit_id,
235 f_path=''),
235 f_path=''),
236 class_='pjax-link')]
236 class_='pjax-link')]
237
237
238 last_cnt = len(path_segments) - 1
238 last_cnt = len(path_segments) - 1
239 for cnt, segment in enumerate(path_segments):
239 for cnt, segment in enumerate(path_segments):
240 if not segment:
240 if not segment:
241 continue
241 continue
242 segment_html = escape(segment)
242 segment_html = escape(segment)
243
243
244 if cnt != last_cnt:
244 if cnt != last_cnt:
245 url_segments.append(
245 url_segments.append(
246 link_to(
246 link_to(
247 segment_html,
247 segment_html,
248 route_path(
248 route_path(
249 'repo_files',
249 'repo_files',
250 repo_name=repo_name,
250 repo_name=repo_name,
251 commit_id=commit_id,
251 commit_id=commit_id,
252 f_path='/'.join(path_segments[:cnt + 1])),
252 f_path='/'.join(path_segments[:cnt + 1])),
253 class_='pjax-link'))
253 class_='pjax-link'))
254 else:
254 else:
255 url_segments.append(segment_html)
255 url_segments.append(segment_html)
256
256
257 return literal('/'.join(url_segments))
257 return literal('/'.join(url_segments))
258
258
259
259
260 class CodeHtmlFormatter(HtmlFormatter):
260 class CodeHtmlFormatter(HtmlFormatter):
261 """
261 """
262 My code Html Formatter for source codes
262 My code Html Formatter for source codes
263 """
263 """
264
264
265 def wrap(self, source, outfile):
265 def wrap(self, source, outfile):
266 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
266 return self._wrap_div(self._wrap_pre(self._wrap_code(source)))
267
267
268 def _wrap_code(self, source):
268 def _wrap_code(self, source):
269 for cnt, it in enumerate(source):
269 for cnt, it in enumerate(source):
270 i, t = it
270 i, t = it
271 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
271 t = '<div id="L%s">%s</div>' % (cnt + 1, t)
272 yield i, t
272 yield i, t
273
273
274 def _wrap_tablelinenos(self, inner):
274 def _wrap_tablelinenos(self, inner):
275 dummyoutfile = StringIO.StringIO()
275 dummyoutfile = StringIO.StringIO()
276 lncount = 0
276 lncount = 0
277 for t, line in inner:
277 for t, line in inner:
278 if t:
278 if t:
279 lncount += 1
279 lncount += 1
280 dummyoutfile.write(line)
280 dummyoutfile.write(line)
281
281
282 fl = self.linenostart
282 fl = self.linenostart
283 mw = len(str(lncount + fl - 1))
283 mw = len(str(lncount + fl - 1))
284 sp = self.linenospecial
284 sp = self.linenospecial
285 st = self.linenostep
285 st = self.linenostep
286 la = self.lineanchors
286 la = self.lineanchors
287 aln = self.anchorlinenos
287 aln = self.anchorlinenos
288 nocls = self.noclasses
288 nocls = self.noclasses
289 if sp:
289 if sp:
290 lines = []
290 lines = []
291
291
292 for i in range(fl, fl + lncount):
292 for i in range(fl, fl + lncount):
293 if i % st == 0:
293 if i % st == 0:
294 if i % sp == 0:
294 if i % sp == 0:
295 if aln:
295 if aln:
296 lines.append('<a href="#%s%d" class="special">%*d</a>' %
296 lines.append('<a href="#%s%d" class="special">%*d</a>' %
297 (la, i, mw, i))
297 (la, i, mw, i))
298 else:
298 else:
299 lines.append('<span class="special">%*d</span>' % (mw, i))
299 lines.append('<span class="special">%*d</span>' % (mw, i))
300 else:
300 else:
301 if aln:
301 if aln:
302 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
302 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
303 else:
303 else:
304 lines.append('%*d' % (mw, i))
304 lines.append('%*d' % (mw, i))
305 else:
305 else:
306 lines.append('')
306 lines.append('')
307 ls = '\n'.join(lines)
307 ls = '\n'.join(lines)
308 else:
308 else:
309 lines = []
309 lines = []
310 for i in range(fl, fl + lncount):
310 for i in range(fl, fl + lncount):
311 if i % st == 0:
311 if i % st == 0:
312 if aln:
312 if aln:
313 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
313 lines.append('<a href="#%s%d">%*d</a>' % (la, i, mw, i))
314 else:
314 else:
315 lines.append('%*d' % (mw, i))
315 lines.append('%*d' % (mw, i))
316 else:
316 else:
317 lines.append('')
317 lines.append('')
318 ls = '\n'.join(lines)
318 ls = '\n'.join(lines)
319
319
320 # in case you wonder about the seemingly redundant <div> here: since the
320 # in case you wonder about the seemingly redundant <div> here: since the
321 # content in the other cell also is wrapped in a div, some browsers in
321 # content in the other cell also is wrapped in a div, some browsers in
322 # some configurations seem to mess up the formatting...
322 # some configurations seem to mess up the formatting...
323 if nocls:
323 if nocls:
324 yield 0, ('<table class="%stable">' % self.cssclass +
324 yield 0, ('<table class="%stable">' % self.cssclass +
325 '<tr><td><div class="linenodiv" '
325 '<tr><td><div class="linenodiv" '
326 'style="background-color: #f0f0f0; padding-right: 10px">'
326 'style="background-color: #f0f0f0; padding-right: 10px">'
327 '<pre style="line-height: 125%">' +
327 '<pre style="line-height: 125%">' +
328 ls + '</pre></div></td><td id="hlcode" class="code">')
328 ls + '</pre></div></td><td id="hlcode" class="code">')
329 else:
329 else:
330 yield 0, ('<table class="%stable">' % self.cssclass +
330 yield 0, ('<table class="%stable">' % self.cssclass +
331 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
331 '<tr><td class="linenos"><div class="linenodiv"><pre>' +
332 ls + '</pre></div></td><td id="hlcode" class="code">')
332 ls + '</pre></div></td><td id="hlcode" class="code">')
333 yield 0, dummyoutfile.getvalue()
333 yield 0, dummyoutfile.getvalue()
334 yield 0, '</td></tr></table>'
334 yield 0, '</td></tr></table>'
335
335
336
336
337 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
337 class SearchContentCodeHtmlFormatter(CodeHtmlFormatter):
338 def __init__(self, **kw):
338 def __init__(self, **kw):
339 # only show these line numbers if set
339 # only show these line numbers if set
340 self.only_lines = kw.pop('only_line_numbers', [])
340 self.only_lines = kw.pop('only_line_numbers', [])
341 self.query_terms = kw.pop('query_terms', [])
341 self.query_terms = kw.pop('query_terms', [])
342 self.max_lines = kw.pop('max_lines', 5)
342 self.max_lines = kw.pop('max_lines', 5)
343 self.line_context = kw.pop('line_context', 3)
343 self.line_context = kw.pop('line_context', 3)
344 self.url = kw.pop('url', None)
344 self.url = kw.pop('url', None)
345
345
346 super(CodeHtmlFormatter, self).__init__(**kw)
346 super(CodeHtmlFormatter, self).__init__(**kw)
347
347
348 def _wrap_code(self, source):
348 def _wrap_code(self, source):
349 for cnt, it in enumerate(source):
349 for cnt, it in enumerate(source):
350 i, t = it
350 i, t = it
351 t = '<pre>%s</pre>' % t
351 t = '<pre>%s</pre>' % t
352 yield i, t
352 yield i, t
353
353
354 def _wrap_tablelinenos(self, inner):
354 def _wrap_tablelinenos(self, inner):
355 yield 0, '<table class="code-highlight %stable">' % self.cssclass
355 yield 0, '<table class="code-highlight %stable">' % self.cssclass
356
356
357 last_shown_line_number = 0
357 last_shown_line_number = 0
358 current_line_number = 1
358 current_line_number = 1
359
359
360 for t, line in inner:
360 for t, line in inner:
361 if not t:
361 if not t:
362 yield t, line
362 yield t, line
363 continue
363 continue
364
364
365 if current_line_number in self.only_lines:
365 if current_line_number in self.only_lines:
366 if last_shown_line_number + 1 != current_line_number:
366 if last_shown_line_number + 1 != current_line_number:
367 yield 0, '<tr>'
367 yield 0, '<tr>'
368 yield 0, '<td class="line">...</td>'
368 yield 0, '<td class="line">...</td>'
369 yield 0, '<td id="hlcode" class="code"></td>'
369 yield 0, '<td id="hlcode" class="code"></td>'
370 yield 0, '</tr>'
370 yield 0, '</tr>'
371
371
372 yield 0, '<tr>'
372 yield 0, '<tr>'
373 if self.url:
373 if self.url:
374 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
374 yield 0, '<td class="line"><a href="%s#L%i">%i</a></td>' % (
375 self.url, current_line_number, current_line_number)
375 self.url, current_line_number, current_line_number)
376 else:
376 else:
377 yield 0, '<td class="line"><a href="">%i</a></td>' % (
377 yield 0, '<td class="line"><a href="">%i</a></td>' % (
378 current_line_number)
378 current_line_number)
379 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
379 yield 0, '<td id="hlcode" class="code">' + line + '</td>'
380 yield 0, '</tr>'
380 yield 0, '</tr>'
381
381
382 last_shown_line_number = current_line_number
382 last_shown_line_number = current_line_number
383
383
384 current_line_number += 1
384 current_line_number += 1
385
385
386
386
387 yield 0, '</table>'
387 yield 0, '</table>'
388
388
389
389
390 def extract_phrases(text_query):
390 def extract_phrases(text_query):
391 """
391 """
392 Extracts phrases from search term string making sure phrases
392 Extracts phrases from search term string making sure phrases
393 contained in double quotes are kept together - and discarding empty values
393 contained in double quotes are kept together - and discarding empty values
394 or fully whitespace values eg.
394 or fully whitespace values eg.
395
395
396 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
396 'some text "a phrase" more' => ['some', 'text', 'a phrase', 'more']
397
397
398 """
398 """
399
399
400 in_phrase = False
400 in_phrase = False
401 buf = ''
401 buf = ''
402 phrases = []
402 phrases = []
403 for char in text_query:
403 for char in text_query:
404 if in_phrase:
404 if in_phrase:
405 if char == '"': # end phrase
405 if char == '"': # end phrase
406 phrases.append(buf)
406 phrases.append(buf)
407 buf = ''
407 buf = ''
408 in_phrase = False
408 in_phrase = False
409 continue
409 continue
410 else:
410 else:
411 buf += char
411 buf += char
412 continue
412 continue
413 else:
413 else:
414 if char == '"': # start phrase
414 if char == '"': # start phrase
415 in_phrase = True
415 in_phrase = True
416 phrases.append(buf)
416 phrases.append(buf)
417 buf = ''
417 buf = ''
418 continue
418 continue
419 elif char == ' ':
419 elif char == ' ':
420 phrases.append(buf)
420 phrases.append(buf)
421 buf = ''
421 buf = ''
422 continue
422 continue
423 else:
423 else:
424 buf += char
424 buf += char
425
425
426 phrases.append(buf)
426 phrases.append(buf)
427 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
427 phrases = [phrase.strip() for phrase in phrases if phrase.strip()]
428 return phrases
428 return phrases
429
429
430
430
431 def get_matching_offsets(text, phrases):
431 def get_matching_offsets(text, phrases):
432 """
432 """
433 Returns a list of string offsets in `text` that the list of `terms` match
433 Returns a list of string offsets in `text` that the list of `terms` match
434
434
435 >>> get_matching_offsets('some text here', ['some', 'here'])
435 >>> get_matching_offsets('some text here', ['some', 'here'])
436 [(0, 4), (10, 14)]
436 [(0, 4), (10, 14)]
437
437
438 """
438 """
439 offsets = []
439 offsets = []
440 for phrase in phrases:
440 for phrase in phrases:
441 for match in re.finditer(phrase, text):
441 for match in re.finditer(phrase, text):
442 offsets.append((match.start(), match.end()))
442 offsets.append((match.start(), match.end()))
443
443
444 return offsets
444 return offsets
445
445
446
446
447 def normalize_text_for_matching(x):
447 def normalize_text_for_matching(x):
448 """
448 """
449 Replaces all non alnum characters to spaces and lower cases the string,
449 Replaces all non alnum characters to spaces and lower cases the string,
450 useful for comparing two text strings without punctuation
450 useful for comparing two text strings without punctuation
451 """
451 """
452 return re.sub(r'[^\w]', ' ', x.lower())
452 return re.sub(r'[^\w]', ' ', x.lower())
453
453
454
454
455 def get_matching_line_offsets(lines, terms):
455 def get_matching_line_offsets(lines, terms):
456 """ Return a set of `lines` indices (starting from 1) matching a
456 """ Return a set of `lines` indices (starting from 1) matching a
457 text search query, along with `context` lines above/below matching lines
457 text search query, along with `context` lines above/below matching lines
458
458
459 :param lines: list of strings representing lines
459 :param lines: list of strings representing lines
460 :param terms: search term string to match in lines eg. 'some text'
460 :param terms: search term string to match in lines eg. 'some text'
461 :param context: number of lines above/below a matching line to add to result
461 :param context: number of lines above/below a matching line to add to result
462 :param max_lines: cut off for lines of interest
462 :param max_lines: cut off for lines of interest
463 eg.
463 eg.
464
464
465 text = '''
465 text = '''
466 words words words
466 words words words
467 words words words
467 words words words
468 some text some
468 some text some
469 words words words
469 words words words
470 words words words
470 words words words
471 text here what
471 text here what
472 '''
472 '''
473 get_matching_line_offsets(text, 'text', context=1)
473 get_matching_line_offsets(text, 'text', context=1)
474 {3: [(5, 9)], 6: [(0, 4)]]
474 {3: [(5, 9)], 6: [(0, 4)]]
475
475
476 """
476 """
477 matching_lines = {}
477 matching_lines = {}
478 phrases = [normalize_text_for_matching(phrase)
478 phrases = [normalize_text_for_matching(phrase)
479 for phrase in extract_phrases(terms)]
479 for phrase in extract_phrases(terms)]
480
480
481 for line_index, line in enumerate(lines, start=1):
481 for line_index, line in enumerate(lines, start=1):
482 match_offsets = get_matching_offsets(
482 match_offsets = get_matching_offsets(
483 normalize_text_for_matching(line), phrases)
483 normalize_text_for_matching(line), phrases)
484 if match_offsets:
484 if match_offsets:
485 matching_lines[line_index] = match_offsets
485 matching_lines[line_index] = match_offsets
486
486
487 return matching_lines
487 return matching_lines
488
488
489
489
490 def hsv_to_rgb(h, s, v):
490 def hsv_to_rgb(h, s, v):
491 """ Convert hsv color values to rgb """
491 """ Convert hsv color values to rgb """
492
492
493 if s == 0.0:
493 if s == 0.0:
494 return v, v, v
494 return v, v, v
495 i = int(h * 6.0) # XXX assume int() truncates!
495 i = int(h * 6.0) # XXX assume int() truncates!
496 f = (h * 6.0) - i
496 f = (h * 6.0) - i
497 p = v * (1.0 - s)
497 p = v * (1.0 - s)
498 q = v * (1.0 - s * f)
498 q = v * (1.0 - s * f)
499 t = v * (1.0 - s * (1.0 - f))
499 t = v * (1.0 - s * (1.0 - f))
500 i = i % 6
500 i = i % 6
501 if i == 0:
501 if i == 0:
502 return v, t, p
502 return v, t, p
503 if i == 1:
503 if i == 1:
504 return q, v, p
504 return q, v, p
505 if i == 2:
505 if i == 2:
506 return p, v, t
506 return p, v, t
507 if i == 3:
507 if i == 3:
508 return p, q, v
508 return p, q, v
509 if i == 4:
509 if i == 4:
510 return t, p, v
510 return t, p, v
511 if i == 5:
511 if i == 5:
512 return v, p, q
512 return v, p, q
513
513
514
514
515 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
515 def unique_color_generator(n=10000, saturation=0.10, lightness=0.95):
516 """
516 """
517 Generator for getting n of evenly distributed colors using
517 Generator for getting n of evenly distributed colors using
518 hsv color and golden ratio. It always return same order of colors
518 hsv color and golden ratio. It always return same order of colors
519
519
520 :param n: number of colors to generate
520 :param n: number of colors to generate
521 :param saturation: saturation of returned colors
521 :param saturation: saturation of returned colors
522 :param lightness: lightness of returned colors
522 :param lightness: lightness of returned colors
523 :returns: RGB tuple
523 :returns: RGB tuple
524 """
524 """
525
525
526 golden_ratio = 0.618033988749895
526 golden_ratio = 0.618033988749895
527 h = 0.22717784590367374
527 h = 0.22717784590367374
528
528
529 for _ in xrange(n):
529 for _ in xrange(n):
530 h += golden_ratio
530 h += golden_ratio
531 h %= 1
531 h %= 1
532 HSV_tuple = [h, saturation, lightness]
532 HSV_tuple = [h, saturation, lightness]
533 RGB_tuple = hsv_to_rgb(*HSV_tuple)
533 RGB_tuple = hsv_to_rgb(*HSV_tuple)
534 yield map(lambda x: str(int(x * 256)), RGB_tuple)
534 yield map(lambda x: str(int(x * 256)), RGB_tuple)
535
535
536
536
537 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
537 def color_hasher(n=10000, saturation=0.10, lightness=0.95):
538 """
538 """
539 Returns a function which when called with an argument returns a unique
539 Returns a function which when called with an argument returns a unique
540 color for that argument, eg.
540 color for that argument, eg.
541
541
542 :param n: number of colors to generate
542 :param n: number of colors to generate
543 :param saturation: saturation of returned colors
543 :param saturation: saturation of returned colors
544 :param lightness: lightness of returned colors
544 :param lightness: lightness of returned colors
545 :returns: css RGB string
545 :returns: css RGB string
546
546
547 >>> color_hash = color_hasher()
547 >>> color_hash = color_hasher()
548 >>> color_hash('hello')
548 >>> color_hash('hello')
549 'rgb(34, 12, 59)'
549 'rgb(34, 12, 59)'
550 >>> color_hash('hello')
550 >>> color_hash('hello')
551 'rgb(34, 12, 59)'
551 'rgb(34, 12, 59)'
552 >>> color_hash('other')
552 >>> color_hash('other')
553 'rgb(90, 224, 159)'
553 'rgb(90, 224, 159)'
554 """
554 """
555
555
556 color_dict = {}
556 color_dict = {}
557 cgenerator = unique_color_generator(
557 cgenerator = unique_color_generator(
558 saturation=saturation, lightness=lightness)
558 saturation=saturation, lightness=lightness)
559
559
560 def get_color_string(thing):
560 def get_color_string(thing):
561 if thing in color_dict:
561 if thing in color_dict:
562 col = color_dict[thing]
562 col = color_dict[thing]
563 else:
563 else:
564 col = color_dict[thing] = cgenerator.next()
564 col = color_dict[thing] = cgenerator.next()
565 return "rgb(%s)" % (', '.join(col))
565 return "rgb(%s)" % (', '.join(col))
566
566
567 return get_color_string
567 return get_color_string
568
568
569
569
570 def get_lexer_safe(mimetype=None, filepath=None):
570 def get_lexer_safe(mimetype=None, filepath=None):
571 """
571 """
572 Tries to return a relevant pygments lexer using mimetype/filepath name,
572 Tries to return a relevant pygments lexer using mimetype/filepath name,
573 defaulting to plain text if none could be found
573 defaulting to plain text if none could be found
574 """
574 """
575 lexer = None
575 lexer = None
576 try:
576 try:
577 if mimetype:
577 if mimetype:
578 lexer = get_lexer_for_mimetype(mimetype)
578 lexer = get_lexer_for_mimetype(mimetype)
579 if not lexer:
579 if not lexer:
580 lexer = get_lexer_for_filename(filepath)
580 lexer = get_lexer_for_filename(filepath)
581 except pygments.util.ClassNotFound:
581 except pygments.util.ClassNotFound:
582 pass
582 pass
583
583
584 if not lexer:
584 if not lexer:
585 lexer = get_lexer_by_name('text')
585 lexer = get_lexer_by_name('text')
586
586
587 return lexer
587 return lexer
588
588
589
589
590 def get_lexer_for_filenode(filenode):
590 def get_lexer_for_filenode(filenode):
591 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
591 lexer = get_custom_lexer(filenode.extension) or filenode.lexer
592 return lexer
592 return lexer
593
593
594
594
595 def pygmentize(filenode, **kwargs):
595 def pygmentize(filenode, **kwargs):
596 """
596 """
597 pygmentize function using pygments
597 pygmentize function using pygments
598
598
599 :param filenode:
599 :param filenode:
600 """
600 """
601 lexer = get_lexer_for_filenode(filenode)
601 lexer = get_lexer_for_filenode(filenode)
602 return literal(code_highlight(filenode.content, lexer,
602 return literal(code_highlight(filenode.content, lexer,
603 CodeHtmlFormatter(**kwargs)))
603 CodeHtmlFormatter(**kwargs)))
604
604
605
605
606 def is_following_repo(repo_name, user_id):
606 def is_following_repo(repo_name, user_id):
607 from rhodecode.model.scm import ScmModel
607 from rhodecode.model.scm import ScmModel
608 return ScmModel().is_following_repo(repo_name, user_id)
608 return ScmModel().is_following_repo(repo_name, user_id)
609
609
610
610
611 class _Message(object):
611 class _Message(object):
612 """A message returned by ``Flash.pop_messages()``.
612 """A message returned by ``Flash.pop_messages()``.
613
613
614 Converting the message to a string returns the message text. Instances
614 Converting the message to a string returns the message text. Instances
615 also have the following attributes:
615 also have the following attributes:
616
616
617 * ``message``: the message text.
617 * ``message``: the message text.
618 * ``category``: the category specified when the message was created.
618 * ``category``: the category specified when the message was created.
619 """
619 """
620
620
621 def __init__(self, category, message):
621 def __init__(self, category, message):
622 self.category = category
622 self.category = category
623 self.message = message
623 self.message = message
624
624
625 def __str__(self):
625 def __str__(self):
626 return self.message
626 return self.message
627
627
628 __unicode__ = __str__
628 __unicode__ = __str__
629
629
630 def __html__(self):
630 def __html__(self):
631 return escape(safe_unicode(self.message))
631 return escape(safe_unicode(self.message))
632
632
633
633
634 class Flash(object):
634 class Flash(object):
635 # List of allowed categories. If None, allow any category.
635 # List of allowed categories. If None, allow any category.
636 categories = ["warning", "notice", "error", "success"]
636 categories = ["warning", "notice", "error", "success"]
637
637
638 # Default category if none is specified.
638 # Default category if none is specified.
639 default_category = "notice"
639 default_category = "notice"
640
640
641 def __init__(self, session_key="flash", categories=None,
641 def __init__(self, session_key="flash", categories=None,
642 default_category=None):
642 default_category=None):
643 """
643 """
644 Instantiate a ``Flash`` object.
644 Instantiate a ``Flash`` object.
645
645
646 ``session_key`` is the key to save the messages under in the user's
646 ``session_key`` is the key to save the messages under in the user's
647 session.
647 session.
648
648
649 ``categories`` is an optional list which overrides the default list
649 ``categories`` is an optional list which overrides the default list
650 of categories.
650 of categories.
651
651
652 ``default_category`` overrides the default category used for messages
652 ``default_category`` overrides the default category used for messages
653 when none is specified.
653 when none is specified.
654 """
654 """
655 self.session_key = session_key
655 self.session_key = session_key
656 if categories is not None:
656 if categories is not None:
657 self.categories = categories
657 self.categories = categories
658 if default_category is not None:
658 if default_category is not None:
659 self.default_category = default_category
659 self.default_category = default_category
660 if self.categories and self.default_category not in self.categories:
660 if self.categories and self.default_category not in self.categories:
661 raise ValueError(
661 raise ValueError(
662 "unrecognized default category %r" % (self.default_category,))
662 "unrecognized default category %r" % (self.default_category,))
663
663
664 def pop_messages(self, session=None, request=None):
664 def pop_messages(self, session=None, request=None):
665 """
665 """
666 Return all accumulated messages and delete them from the session.
666 Return all accumulated messages and delete them from the session.
667
667
668 The return value is a list of ``Message`` objects.
668 The return value is a list of ``Message`` objects.
669 """
669 """
670 messages = []
670 messages = []
671
671
672 if not session:
672 if not session:
673 if not request:
673 if not request:
674 request = get_current_request()
674 request = get_current_request()
675 session = request.session
675 session = request.session
676
676
677 # Pop the 'old' pylons flash messages. They are tuples of the form
677 # Pop the 'old' pylons flash messages. They are tuples of the form
678 # (category, message)
678 # (category, message)
679 for cat, msg in session.pop(self.session_key, []):
679 for cat, msg in session.pop(self.session_key, []):
680 messages.append(_Message(cat, msg))
680 messages.append(_Message(cat, msg))
681
681
682 # Pop the 'new' pyramid flash messages for each category as list
682 # Pop the 'new' pyramid flash messages for each category as list
683 # of strings.
683 # of strings.
684 for cat in self.categories:
684 for cat in self.categories:
685 for msg in session.pop_flash(queue=cat):
685 for msg in session.pop_flash(queue=cat):
686 messages.append(_Message(cat, msg))
686 messages.append(_Message(cat, msg))
687 # Map messages from the default queue to the 'notice' category.
687 # Map messages from the default queue to the 'notice' category.
688 for msg in session.pop_flash():
688 for msg in session.pop_flash():
689 messages.append(_Message('notice', msg))
689 messages.append(_Message('notice', msg))
690
690
691 session.save()
691 session.save()
692 return messages
692 return messages
693
693
694 def json_alerts(self, session=None, request=None):
694 def json_alerts(self, session=None, request=None):
695 payloads = []
695 payloads = []
696 messages = flash.pop_messages(session=session, request=request)
696 messages = flash.pop_messages(session=session, request=request)
697 if messages:
697 if messages:
698 for message in messages:
698 for message in messages:
699 subdata = {}
699 subdata = {}
700 if hasattr(message.message, 'rsplit'):
700 if hasattr(message.message, 'rsplit'):
701 flash_data = message.message.rsplit('|DELIM|', 1)
701 flash_data = message.message.rsplit('|DELIM|', 1)
702 org_message = flash_data[0]
702 org_message = flash_data[0]
703 if len(flash_data) > 1:
703 if len(flash_data) > 1:
704 subdata = json.loads(flash_data[1])
704 subdata = json.loads(flash_data[1])
705 else:
705 else:
706 org_message = message.message
706 org_message = message.message
707 payloads.append({
707 payloads.append({
708 'message': {
708 'message': {
709 'message': u'{}'.format(org_message),
709 'message': u'{}'.format(org_message),
710 'level': message.category,
710 'level': message.category,
711 'force': True,
711 'force': True,
712 'subdata': subdata
712 'subdata': subdata
713 }
713 }
714 })
714 })
715 return json.dumps(payloads)
715 return json.dumps(payloads)
716
716
717 def __call__(self, message, category=None, ignore_duplicate=False,
717 def __call__(self, message, category=None, ignore_duplicate=False,
718 session=None, request=None):
718 session=None, request=None):
719
719
720 if not session:
720 if not session:
721 if not request:
721 if not request:
722 request = get_current_request()
722 request = get_current_request()
723 session = request.session
723 session = request.session
724
724
725 session.flash(
725 session.flash(
726 message, queue=category, allow_duplicate=not ignore_duplicate)
726 message, queue=category, allow_duplicate=not ignore_duplicate)
727
727
728
728
729 flash = Flash()
729 flash = Flash()
730
730
731 #==============================================================================
731 #==============================================================================
732 # SCM FILTERS available via h.
732 # SCM FILTERS available via h.
733 #==============================================================================
733 #==============================================================================
734 from rhodecode.lib.vcs.utils import author_name, author_email
734 from rhodecode.lib.vcs.utils import author_name, author_email
735 from rhodecode.lib.utils2 import credentials_filter, age as _age
735 from rhodecode.lib.utils2 import credentials_filter, age as _age
736 from rhodecode.model.db import User, ChangesetStatus
736 from rhodecode.model.db import User, ChangesetStatus
737
737
738 age = _age
738 age = _age
739 capitalize = lambda x: x.capitalize()
739 capitalize = lambda x: x.capitalize()
740 email = author_email
740 email = author_email
741 short_id = lambda x: x[:12]
741 short_id = lambda x: x[:12]
742 hide_credentials = lambda x: ''.join(credentials_filter(x))
742 hide_credentials = lambda x: ''.join(credentials_filter(x))
743
743
744
744
745 def age_component(datetime_iso, value=None, time_is_local=False):
745 def age_component(datetime_iso, value=None, time_is_local=False):
746 title = value or format_date(datetime_iso)
746 title = value or format_date(datetime_iso)
747 tzinfo = '+00:00'
747 tzinfo = '+00:00'
748
748
749 # detect if we have a timezone info, otherwise, add it
749 # detect if we have a timezone info, otherwise, add it
750 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
750 if isinstance(datetime_iso, datetime) and not datetime_iso.tzinfo:
751 if time_is_local:
751 if time_is_local:
752 tzinfo = time.strftime("+%H:%M",
752 tzinfo = time.strftime("+%H:%M",
753 time.gmtime(
753 time.gmtime(
754 (datetime.now() - datetime.utcnow()).seconds + 1
754 (datetime.now() - datetime.utcnow()).seconds + 1
755 )
755 )
756 )
756 )
757
757
758 return literal(
758 return literal(
759 '<time class="timeago tooltip" '
759 '<time class="timeago tooltip" '
760 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
760 'title="{1}{2}" datetime="{0}{2}">{1}</time>'.format(
761 datetime_iso, title, tzinfo))
761 datetime_iso, title, tzinfo))
762
762
763
763
764 def _shorten_commit_id(commit_id):
764 def _shorten_commit_id(commit_id):
765 from rhodecode import CONFIG
765 from rhodecode import CONFIG
766 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
766 def_len = safe_int(CONFIG.get('rhodecode_show_sha_length', 12))
767 return commit_id[:def_len]
767 return commit_id[:def_len]
768
768
769
769
770 def show_id(commit):
770 def show_id(commit):
771 """
771 """
772 Configurable function that shows ID
772 Configurable function that shows ID
773 by default it's r123:fffeeefffeee
773 by default it's r123:fffeeefffeee
774
774
775 :param commit: commit instance
775 :param commit: commit instance
776 """
776 """
777 from rhodecode import CONFIG
777 from rhodecode import CONFIG
778 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
778 show_idx = str2bool(CONFIG.get('rhodecode_show_revision_number', True))
779
779
780 raw_id = _shorten_commit_id(commit.raw_id)
780 raw_id = _shorten_commit_id(commit.raw_id)
781 if show_idx:
781 if show_idx:
782 return 'r%s:%s' % (commit.idx, raw_id)
782 return 'r%s:%s' % (commit.idx, raw_id)
783 else:
783 else:
784 return '%s' % (raw_id, )
784 return '%s' % (raw_id, )
785
785
786
786
787 def format_date(date):
787 def format_date(date):
788 """
788 """
789 use a standardized formatting for dates used in RhodeCode
789 use a standardized formatting for dates used in RhodeCode
790
790
791 :param date: date/datetime object
791 :param date: date/datetime object
792 :return: formatted date
792 :return: formatted date
793 """
793 """
794
794
795 if date:
795 if date:
796 _fmt = "%a, %d %b %Y %H:%M:%S"
796 _fmt = "%a, %d %b %Y %H:%M:%S"
797 return safe_unicode(date.strftime(_fmt))
797 return safe_unicode(date.strftime(_fmt))
798
798
799 return u""
799 return u""
800
800
801
801
802 class _RepoChecker(object):
802 class _RepoChecker(object):
803
803
804 def __init__(self, backend_alias):
804 def __init__(self, backend_alias):
805 self._backend_alias = backend_alias
805 self._backend_alias = backend_alias
806
806
807 def __call__(self, repository):
807 def __call__(self, repository):
808 if hasattr(repository, 'alias'):
808 if hasattr(repository, 'alias'):
809 _type = repository.alias
809 _type = repository.alias
810 elif hasattr(repository, 'repo_type'):
810 elif hasattr(repository, 'repo_type'):
811 _type = repository.repo_type
811 _type = repository.repo_type
812 else:
812 else:
813 _type = repository
813 _type = repository
814 return _type == self._backend_alias
814 return _type == self._backend_alias
815
815
816 is_git = _RepoChecker('git')
816 is_git = _RepoChecker('git')
817 is_hg = _RepoChecker('hg')
817 is_hg = _RepoChecker('hg')
818 is_svn = _RepoChecker('svn')
818 is_svn = _RepoChecker('svn')
819
819
820
820
821 def get_repo_type_by_name(repo_name):
821 def get_repo_type_by_name(repo_name):
822 repo = Repository.get_by_repo_name(repo_name)
822 repo = Repository.get_by_repo_name(repo_name)
823 return repo.repo_type
823 return repo.repo_type
824
824
825
825
826 def is_svn_without_proxy(repository):
826 def is_svn_without_proxy(repository):
827 if is_svn(repository):
827 if is_svn(repository):
828 from rhodecode.model.settings import VcsSettingsModel
828 from rhodecode.model.settings import VcsSettingsModel
829 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
829 conf = VcsSettingsModel().get_ui_settings_as_config_obj()
830 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
830 return not str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
831 return False
831 return False
832
832
833
833
834 def discover_user(author):
834 def discover_user(author):
835 """
835 """
836 Tries to discover RhodeCode User based on the autho string. Author string
836 Tries to discover RhodeCode User based on the autho string. Author string
837 is typically `FirstName LastName <email@address.com>`
837 is typically `FirstName LastName <email@address.com>`
838 """
838 """
839
839
840 # if author is already an instance use it for extraction
840 # if author is already an instance use it for extraction
841 if isinstance(author, User):
841 if isinstance(author, User):
842 return author
842 return author
843
843
844 # Valid email in the attribute passed, see if they're in the system
844 # Valid email in the attribute passed, see if they're in the system
845 _email = author_email(author)
845 _email = author_email(author)
846 if _email != '':
846 if _email != '':
847 user = User.get_by_email(_email, case_insensitive=True, cache=True)
847 user = User.get_by_email(_email, case_insensitive=True, cache=True)
848 if user is not None:
848 if user is not None:
849 return user
849 return user
850
850
851 # Maybe it's a username, we try to extract it and fetch by username ?
851 # Maybe it's a username, we try to extract it and fetch by username ?
852 _author = author_name(author)
852 _author = author_name(author)
853 user = User.get_by_username(_author, case_insensitive=True, cache=True)
853 user = User.get_by_username(_author, case_insensitive=True, cache=True)
854 if user is not None:
854 if user is not None:
855 return user
855 return user
856
856
857 return None
857 return None
858
858
859
859
860 def email_or_none(author):
860 def email_or_none(author):
861 # extract email from the commit string
861 # extract email from the commit string
862 _email = author_email(author)
862 _email = author_email(author)
863
863
864 # If we have an email, use it, otherwise
864 # If we have an email, use it, otherwise
865 # see if it contains a username we can get an email from
865 # see if it contains a username we can get an email from
866 if _email != '':
866 if _email != '':
867 return _email
867 return _email
868 else:
868 else:
869 user = User.get_by_username(
869 user = User.get_by_username(
870 author_name(author), case_insensitive=True, cache=True)
870 author_name(author), case_insensitive=True, cache=True)
871
871
872 if user is not None:
872 if user is not None:
873 return user.email
873 return user.email
874
874
875 # No valid email, not a valid user in the system, none!
875 # No valid email, not a valid user in the system, none!
876 return None
876 return None
877
877
878
878
879 def link_to_user(author, length=0, **kwargs):
879 def link_to_user(author, length=0, **kwargs):
880 user = discover_user(author)
880 user = discover_user(author)
881 # user can be None, but if we have it already it means we can re-use it
881 # user can be None, but if we have it already it means we can re-use it
882 # in the person() function, so we save 1 intensive-query
882 # in the person() function, so we save 1 intensive-query
883 if user:
883 if user:
884 author = user
884 author = user
885
885
886 display_person = person(author, 'username_or_name_or_email')
886 display_person = person(author, 'username_or_name_or_email')
887 if length:
887 if length:
888 display_person = shorter(display_person, length)
888 display_person = shorter(display_person, length)
889
889
890 if user:
890 if user:
891 return link_to(
891 return link_to(
892 escape(display_person),
892 escape(display_person),
893 route_path('user_profile', username=user.username),
893 route_path('user_profile', username=user.username),
894 **kwargs)
894 **kwargs)
895 else:
895 else:
896 return escape(display_person)
896 return escape(display_person)
897
897
898
898
899 def person(author, show_attr="username_and_name"):
899 def person(author, show_attr="username_and_name"):
900 user = discover_user(author)
900 user = discover_user(author)
901 if user:
901 if user:
902 return getattr(user, show_attr)
902 return getattr(user, show_attr)
903 else:
903 else:
904 _author = author_name(author)
904 _author = author_name(author)
905 _email = email(author)
905 _email = email(author)
906 return _author or _email
906 return _author or _email
907
907
908
908
909 def author_string(email):
909 def author_string(email):
910 if email:
910 if email:
911 user = User.get_by_email(email, case_insensitive=True, cache=True)
911 user = User.get_by_email(email, case_insensitive=True, cache=True)
912 if user:
912 if user:
913 if user.first_name or user.last_name:
913 if user.first_name or user.last_name:
914 return '%s %s &lt;%s&gt;' % (
914 return '%s %s &lt;%s&gt;' % (
915 user.first_name, user.last_name, email)
915 user.first_name, user.last_name, email)
916 else:
916 else:
917 return email
917 return email
918 else:
918 else:
919 return email
919 return email
920 else:
920 else:
921 return None
921 return None
922
922
923
923
924 def person_by_id(id_, show_attr="username_and_name"):
924 def person_by_id(id_, show_attr="username_and_name"):
925 # attr to return from fetched user
925 # attr to return from fetched user
926 person_getter = lambda usr: getattr(usr, show_attr)
926 person_getter = lambda usr: getattr(usr, show_attr)
927
927
928 #maybe it's an ID ?
928 #maybe it's an ID ?
929 if str(id_).isdigit() or isinstance(id_, int):
929 if str(id_).isdigit() or isinstance(id_, int):
930 id_ = int(id_)
930 id_ = int(id_)
931 user = User.get(id_)
931 user = User.get(id_)
932 if user is not None:
932 if user is not None:
933 return person_getter(user)
933 return person_getter(user)
934 return id_
934 return id_
935
935
936
936
937 def gravatar_with_user(request, author, show_disabled=False):
937 def gravatar_with_user(request, author, show_disabled=False):
938 _render = request.get_partial_renderer(
938 _render = request.get_partial_renderer(
939 'rhodecode:templates/base/base.mako')
939 'rhodecode:templates/base/base.mako')
940 return _render('gravatar_with_user', author, show_disabled=show_disabled)
940 return _render('gravatar_with_user', author, show_disabled=show_disabled)
941
941
942
942
943 tags_paterns = OrderedDict((
943 tags_paterns = OrderedDict((
944 ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
944 ('lang', (re.compile(r'\[(lang|language)\ \=\&gt;\ *([a-zA-Z\-\/\#\+\.]*)\]'),
945 '<div class="metatag" tag="lang">\\2</div>')),
945 '<div class="metatag" tag="lang">\\2</div>')),
946
946
947 ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
947 ('see', (re.compile(r'\[see\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
948 '<div class="metatag" tag="see">see: \\1 </div>')),
948 '<div class="metatag" tag="see">see: \\1 </div>')),
949
949
950 ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
950 ('url', (re.compile(r'\[url\ \=\&gt;\ \[([a-zA-Z0-9\ \.\-\_]+)\]\((http://|https://|/)(.*?)\)\]'),
951 '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),
951 '<div class="metatag" tag="url"> <a href="\\2\\3">\\1</a> </div>')),
952
952
953 ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
953 ('license', (re.compile(r'\[license\ \=\&gt;\ *([a-zA-Z0-9\/\=\?\&amp;\ \:\/\.\-]*)\]'),
954 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),
954 '<div class="metatag" tag="license"><a href="http:\/\/www.opensource.org/licenses/\\1">\\1</a></div>')),
955
955
956 ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
956 ('ref', (re.compile(r'\[(requires|recommends|conflicts|base)\ \=\&gt;\ *([a-zA-Z0-9\-\/]*)\]'),
957 '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),
957 '<div class="metatag" tag="ref \\1">\\1: <a href="/\\2">\\2</a></div>')),
958
958
959 ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
959 ('state', (re.compile(r'\[(stable|featured|stale|dead|dev|deprecated)\]'),
960 '<div class="metatag" tag="state \\1">\\1</div>')),
960 '<div class="metatag" tag="state \\1">\\1</div>')),
961
961
962 # label in grey
962 # label in grey
963 ('label', (re.compile(r'\[([a-z]+)\]'),
963 ('label', (re.compile(r'\[([a-z]+)\]'),
964 '<div class="metatag" tag="label">\\1</div>')),
964 '<div class="metatag" tag="label">\\1</div>')),
965
965
966 # generic catch all in grey
966 # generic catch all in grey
967 ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
967 ('generic', (re.compile(r'\[([a-zA-Z0-9\.\-\_]+)\]'),
968 '<div class="metatag" tag="generic">\\1</div>')),
968 '<div class="metatag" tag="generic">\\1</div>')),
969 ))
969 ))
970
970
971
971
972 def extract_metatags(value):
972 def extract_metatags(value):
973 """
973 """
974 Extract supported meta-tags from given text value
974 Extract supported meta-tags from given text value
975 """
975 """
976 tags = []
976 tags = []
977 if not value:
977 if not value:
978 return tags, ''
978 return tags, ''
979
979
980 for key, val in tags_paterns.items():
980 for key, val in tags_paterns.items():
981 pat, replace_html = val
981 pat, replace_html = val
982 tags.extend([(key, x.group()) for x in pat.finditer(value)])
982 tags.extend([(key, x.group()) for x in pat.finditer(value)])
983 value = pat.sub('', value)
983 value = pat.sub('', value)
984
984
985 return tags, value
985 return tags, value
986
986
987
987
988 def style_metatag(tag_type, value):
988 def style_metatag(tag_type, value):
989 """
989 """
990 converts tags from value into html equivalent
990 converts tags from value into html equivalent
991 """
991 """
992 if not value:
992 if not value:
993 return ''
993 return ''
994
994
995 html_value = value
995 html_value = value
996 tag_data = tags_paterns.get(tag_type)
996 tag_data = tags_paterns.get(tag_type)
997 if tag_data:
997 if tag_data:
998 pat, replace_html = tag_data
998 pat, replace_html = tag_data
999 # convert to plain `unicode` instead of a markup tag to be used in
999 # convert to plain `unicode` instead of a markup tag to be used in
1000 # regex expressions. safe_unicode doesn't work here
1000 # regex expressions. safe_unicode doesn't work here
1001 html_value = pat.sub(replace_html, unicode(value))
1001 html_value = pat.sub(replace_html, unicode(value))
1002
1002
1003 return html_value
1003 return html_value
1004
1004
1005
1005
1006 def bool2icon(value):
1006 def bool2icon(value):
1007 """
1007 """
1008 Returns boolean value of a given value, represented as html element with
1008 Returns boolean value of a given value, represented as html element with
1009 classes that will represent icons
1009 classes that will represent icons
1010
1010
1011 :param value: given value to convert to html node
1011 :param value: given value to convert to html node
1012 """
1012 """
1013
1013
1014 if value: # does bool conversion
1014 if value: # does bool conversion
1015 return HTML.tag('i', class_="icon-true")
1015 return HTML.tag('i', class_="icon-true")
1016 else: # not true as bool
1016 else: # not true as bool
1017 return HTML.tag('i', class_="icon-false")
1017 return HTML.tag('i', class_="icon-false")
1018
1018
1019
1019
1020 #==============================================================================
1020 #==============================================================================
1021 # PERMS
1021 # PERMS
1022 #==============================================================================
1022 #==============================================================================
1023 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
1023 from rhodecode.lib.auth import HasPermissionAny, HasPermissionAll, \
1024 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
1024 HasRepoPermissionAny, HasRepoPermissionAll, HasRepoGroupPermissionAll, \
1025 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
1025 HasRepoGroupPermissionAny, HasRepoPermissionAnyApi, get_csrf_token, \
1026 csrf_token_key
1026 csrf_token_key
1027
1027
1028
1028
1029 #==============================================================================
1029 #==============================================================================
1030 # GRAVATAR URL
1030 # GRAVATAR URL
1031 #==============================================================================
1031 #==============================================================================
1032 class InitialsGravatar(object):
1032 class InitialsGravatar(object):
1033 def __init__(self, email_address, first_name, last_name, size=30,
1033 def __init__(self, email_address, first_name, last_name, size=30,
1034 background=None, text_color='#fff'):
1034 background=None, text_color='#fff'):
1035 self.size = size
1035 self.size = size
1036 self.first_name = first_name
1036 self.first_name = first_name
1037 self.last_name = last_name
1037 self.last_name = last_name
1038 self.email_address = email_address
1038 self.email_address = email_address
1039 self.background = background or self.str2color(email_address)
1039 self.background = background or self.str2color(email_address)
1040 self.text_color = text_color
1040 self.text_color = text_color
1041
1041
1042 def get_color_bank(self):
1042 def get_color_bank(self):
1043 """
1043 """
1044 returns a predefined list of colors that gravatars can use.
1044 returns a predefined list of colors that gravatars can use.
1045 Those are randomized distinct colors that guarantee readability and
1045 Those are randomized distinct colors that guarantee readability and
1046 uniqueness.
1046 uniqueness.
1047
1047
1048 generated with: http://phrogz.net/css/distinct-colors.html
1048 generated with: http://phrogz.net/css/distinct-colors.html
1049 """
1049 """
1050 return [
1050 return [
1051 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1051 '#bf3030', '#a67f53', '#00ff00', '#5989b3', '#392040', '#d90000',
1052 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1052 '#402910', '#204020', '#79baf2', '#a700b3', '#bf6060', '#7f5320',
1053 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1053 '#008000', '#003059', '#ee00ff', '#ff0000', '#8c4b00', '#007300',
1054 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1054 '#005fb3', '#de73e6', '#ff4040', '#ffaa00', '#3df255', '#203140',
1055 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1055 '#47004d', '#591616', '#664400', '#59b365', '#0d2133', '#83008c',
1056 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1056 '#592d2d', '#bf9f60', '#73e682', '#1d3f73', '#73006b', '#402020',
1057 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1057 '#b2862d', '#397341', '#597db3', '#e600d6', '#a60000', '#736039',
1058 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1058 '#00b318', '#79aaf2', '#330d30', '#ff8080', '#403010', '#16591f',
1059 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1059 '#002459', '#8c4688', '#e50000', '#ffbf40', '#00732e', '#102340',
1060 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1060 '#bf60ac', '#8c4646', '#cc8800', '#00a642', '#1d3473', '#b32d98',
1061 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1061 '#660e00', '#ffd580', '#80ffb2', '#7391e6', '#733967', '#d97b6c',
1062 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1062 '#8c5e00', '#59b389', '#3967e6', '#590047', '#73281d', '#665200',
1063 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1063 '#00e67a', '#2d50b3', '#8c2377', '#734139', '#b2982d', '#16593a',
1064 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1064 '#001859', '#ff00aa', '#a65e53', '#ffcc00', '#0d3321', '#2d3959',
1065 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1065 '#731d56', '#401610', '#4c3d00', '#468c6c', '#002ca6', '#d936a3',
1066 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1066 '#d94c36', '#403920', '#36d9a3', '#0d1733', '#592d4a', '#993626',
1067 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1067 '#cca300', '#00734d', '#46598c', '#8c005e', '#7f1100', '#8c7000',
1068 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1068 '#00a66f', '#7382e6', '#b32d74', '#d9896c', '#ffe680', '#1d7362',
1069 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1069 '#364cd9', '#73003d', '#d93a00', '#998a4d', '#59b3a1', '#5965b3',
1070 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1070 '#e5007a', '#73341d', '#665f00', '#00b38f', '#0018b3', '#59163a',
1071 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1071 '#b2502d', '#bfb960', '#00ffcc', '#23318c', '#a6537f', '#734939',
1072 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1072 '#b2a700', '#104036', '#3d3df2', '#402031', '#e56739', '#736f39',
1073 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1073 '#79f2ea', '#000059', '#401029', '#4c1400', '#ffee00', '#005953',
1074 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1074 '#101040', '#990052', '#402820', '#403d10', '#00ffee', '#0000d9',
1075 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1075 '#ff80c4', '#a66953', '#eeff00', '#00ccbe', '#8080ff', '#e673a1',
1076 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1076 '#a62c00', '#474d00', '#1a3331', '#46468c', '#733950', '#662900',
1077 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1077 '#858c23', '#238c85', '#0f0073', '#b20047', '#d9986c', '#becc00',
1078 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1078 '#396f73', '#281d73', '#ff0066', '#ff6600', '#dee673', '#59adb3',
1079 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1079 '#6559b3', '#590024', '#b2622d', '#98b32d', '#36ced9', '#332d59',
1080 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1080 '#40001a', '#733f1d', '#526600', '#005359', '#242040', '#bf6079',
1081 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1081 '#735039', '#cef23d', '#007780', '#5630bf', '#66001b', '#b24700',
1082 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1082 '#acbf60', '#1d6273', '#25008c', '#731d34', '#a67453', '#50592d',
1083 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1083 '#00ccff', '#6600ff', '#ff0044', '#4c1f00', '#8a994d', '#79daf2',
1084 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1084 '#a173e6', '#d93662', '#402310', '#aaff00', '#2d98b3', '#8c40ff',
1085 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1085 '#592d39', '#ff8c40', '#354020', '#103640', '#1a0040', '#331a20',
1086 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1086 '#331400', '#334d00', '#1d5673', '#583973', '#7f0022', '#4c3626',
1087 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1087 '#88cc00', '#36a3d9', '#3d0073', '#d9364c', '#33241a', '#698c23',
1088 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1088 '#5995b3', '#300059', '#e57382', '#7f3300', '#366600', '#00aaff',
1089 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1089 '#3a1659', '#733941', '#663600', '#74b32d', '#003c59', '#7f53a6',
1090 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1090 '#73000f', '#ff8800', '#baf279', '#79caf2', '#291040', '#a6293a',
1091 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1091 '#b2742d', '#587339', '#0077b3', '#632699', '#400009', '#d9a66c',
1092 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1092 '#294010', '#2d4a59', '#aa00ff', '#4c131b', '#b25f00', '#5ce600',
1093 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1093 '#267399', '#a336d9', '#990014', '#664e33', '#86bf60', '#0088ff',
1094 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1094 '#7700b3', '#593a16', '#073300', '#1d4b73', '#ac60bf', '#e59539',
1095 '#4f8c46', '#368dd9', '#5c0073'
1095 '#4f8c46', '#368dd9', '#5c0073'
1096 ]
1096 ]
1097
1097
1098 def rgb_to_hex_color(self, rgb_tuple):
1098 def rgb_to_hex_color(self, rgb_tuple):
1099 """
1099 """
1100 Converts an rgb_tuple passed to an hex color.
1100 Converts an rgb_tuple passed to an hex color.
1101
1101
1102 :param rgb_tuple: tuple with 3 ints represents rgb color space
1102 :param rgb_tuple: tuple with 3 ints represents rgb color space
1103 """
1103 """
1104 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1104 return '#' + ("".join(map(chr, rgb_tuple)).encode('hex'))
1105
1105
1106 def email_to_int_list(self, email_str):
1106 def email_to_int_list(self, email_str):
1107 """
1107 """
1108 Get every byte of the hex digest value of email and turn it to integer.
1108 Get every byte of the hex digest value of email and turn it to integer.
1109 It's going to be always between 0-255
1109 It's going to be always between 0-255
1110 """
1110 """
1111 digest = md5_safe(email_str.lower())
1111 digest = md5_safe(email_str.lower())
1112 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1112 return [int(digest[i * 2:i * 2 + 2], 16) for i in range(16)]
1113
1113
1114 def pick_color_bank_index(self, email_str, color_bank):
1114 def pick_color_bank_index(self, email_str, color_bank):
1115 return self.email_to_int_list(email_str)[0] % len(color_bank)
1115 return self.email_to_int_list(email_str)[0] % len(color_bank)
1116
1116
1117 def str2color(self, email_str):
1117 def str2color(self, email_str):
1118 """
1118 """
1119 Tries to map in a stable algorithm an email to color
1119 Tries to map in a stable algorithm an email to color
1120
1120
1121 :param email_str:
1121 :param email_str:
1122 """
1122 """
1123 color_bank = self.get_color_bank()
1123 color_bank = self.get_color_bank()
1124 # pick position (module it's length so we always find it in the
1124 # pick position (module it's length so we always find it in the
1125 # bank even if it's smaller than 256 values
1125 # bank even if it's smaller than 256 values
1126 pos = self.pick_color_bank_index(email_str, color_bank)
1126 pos = self.pick_color_bank_index(email_str, color_bank)
1127 return color_bank[pos]
1127 return color_bank[pos]
1128
1128
1129 def normalize_email(self, email_address):
1129 def normalize_email(self, email_address):
1130 import unicodedata
1130 import unicodedata
1131 # default host used to fill in the fake/missing email
1131 # default host used to fill in the fake/missing email
1132 default_host = u'localhost'
1132 default_host = u'localhost'
1133
1133
1134 if not email_address:
1134 if not email_address:
1135 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1135 email_address = u'%s@%s' % (User.DEFAULT_USER, default_host)
1136
1136
1137 email_address = safe_unicode(email_address)
1137 email_address = safe_unicode(email_address)
1138
1138
1139 if u'@' not in email_address:
1139 if u'@' not in email_address:
1140 email_address = u'%s@%s' % (email_address, default_host)
1140 email_address = u'%s@%s' % (email_address, default_host)
1141
1141
1142 if email_address.endswith(u'@'):
1142 if email_address.endswith(u'@'):
1143 email_address = u'%s%s' % (email_address, default_host)
1143 email_address = u'%s%s' % (email_address, default_host)
1144
1144
1145 email_address = unicodedata.normalize('NFKD', email_address)\
1145 email_address = unicodedata.normalize('NFKD', email_address)\
1146 .encode('ascii', 'ignore')
1146 .encode('ascii', 'ignore')
1147 return email_address
1147 return email_address
1148
1148
1149 def get_initials(self):
1149 def get_initials(self):
1150 """
1150 """
1151 Returns 2 letter initials calculated based on the input.
1151 Returns 2 letter initials calculated based on the input.
1152 The algorithm picks first given email address, and takes first letter
1152 The algorithm picks first given email address, and takes first letter
1153 of part before @, and then the first letter of server name. In case
1153 of part before @, and then the first letter of server name. In case
1154 the part before @ is in a format of `somestring.somestring2` it replaces
1154 the part before @ is in a format of `somestring.somestring2` it replaces
1155 the server letter with first letter of somestring2
1155 the server letter with first letter of somestring2
1156
1156
1157 In case function was initialized with both first and lastname, this
1157 In case function was initialized with both first and lastname, this
1158 overrides the extraction from email by first letter of the first and
1158 overrides the extraction from email by first letter of the first and
1159 last name. We add special logic to that functionality, In case Full name
1159 last name. We add special logic to that functionality, In case Full name
1160 is compound, like Guido Von Rossum, we use last part of the last name
1160 is compound, like Guido Von Rossum, we use last part of the last name
1161 (Von Rossum) picking `R`.
1161 (Von Rossum) picking `R`.
1162
1162
1163 Function also normalizes the non-ascii characters to they ascii
1163 Function also normalizes the non-ascii characters to they ascii
1164 representation, eg Ą => A
1164 representation, eg Ą => A
1165 """
1165 """
1166 import unicodedata
1166 import unicodedata
1167 # replace non-ascii to ascii
1167 # replace non-ascii to ascii
1168 first_name = unicodedata.normalize(
1168 first_name = unicodedata.normalize(
1169 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1169 'NFKD', safe_unicode(self.first_name)).encode('ascii', 'ignore')
1170 last_name = unicodedata.normalize(
1170 last_name = unicodedata.normalize(
1171 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1171 'NFKD', safe_unicode(self.last_name)).encode('ascii', 'ignore')
1172
1172
1173 # do NFKD encoding, and also make sure email has proper format
1173 # do NFKD encoding, and also make sure email has proper format
1174 email_address = self.normalize_email(self.email_address)
1174 email_address = self.normalize_email(self.email_address)
1175
1175
1176 # first push the email initials
1176 # first push the email initials
1177 prefix, server = email_address.split('@', 1)
1177 prefix, server = email_address.split('@', 1)
1178
1178
1179 # check if prefix is maybe a 'first_name.last_name' syntax
1179 # check if prefix is maybe a 'first_name.last_name' syntax
1180 _dot_split = prefix.rsplit('.', 1)
1180 _dot_split = prefix.rsplit('.', 1)
1181 if len(_dot_split) == 2 and _dot_split[1]:
1181 if len(_dot_split) == 2 and _dot_split[1]:
1182 initials = [_dot_split[0][0], _dot_split[1][0]]
1182 initials = [_dot_split[0][0], _dot_split[1][0]]
1183 else:
1183 else:
1184 initials = [prefix[0], server[0]]
1184 initials = [prefix[0], server[0]]
1185
1185
1186 # then try to replace either first_name or last_name
1186 # then try to replace either first_name or last_name
1187 fn_letter = (first_name or " ")[0].strip()
1187 fn_letter = (first_name or " ")[0].strip()
1188 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1188 ln_letter = (last_name.split(' ', 1)[-1] or " ")[0].strip()
1189
1189
1190 if fn_letter:
1190 if fn_letter:
1191 initials[0] = fn_letter
1191 initials[0] = fn_letter
1192
1192
1193 if ln_letter:
1193 if ln_letter:
1194 initials[1] = ln_letter
1194 initials[1] = ln_letter
1195
1195
1196 return ''.join(initials).upper()
1196 return ''.join(initials).upper()
1197
1197
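As a quick illustration of the initials algorithm described in the docstring above, here is a minimal standalone sketch. It mirrors the email-prefix / server-letter logic and the first/last name override, but it is not the class method itself; the helper name `initials_from` is invented for illustration.

    import unicodedata


    def initials_from(email, first_name=u'', last_name=u''):
        # normalize non-ascii characters to their closest ascii form, e.g. u'\u0104' -> 'A'
        def to_ascii(value):
            return unicodedata.normalize(
                'NFKD', value).encode('ascii', 'ignore').decode('ascii')

        first_name = to_ascii(first_name)
        last_name = to_ascii(last_name)

        prefix, server = email.split('@', 1)

        # prefer a `first.last@server` style prefix over the server letter
        dot_split = prefix.rsplit('.', 1)
        if len(dot_split) == 2 and dot_split[1]:
            initials = [dot_split[0][0], dot_split[1][0]]
        else:
            initials = [prefix[0], server[0]]

        # an explicit first/last name overrides the email-based guess;
        # for compound last names only the last part is used
        fn_letter = (first_name or ' ')[0].strip()
        ln_letter = (last_name.split(' ', 1)[-1] or ' ')[0].strip()
        if fn_letter:
            initials[0] = fn_letter
        if ln_letter:
            initials[1] = ln_letter

        return ''.join(initials).upper()


    print(initials_from('john.doe@example.com'))                      # JD
    print(initials_from('jd@example.com'))                            # JE
    print(initials_from('jd@example.com', u'Guido', u'Von Rossum'))   # GR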
1198 def get_img_data_by_type(self, font_family, img_type):
1198 def get_img_data_by_type(self, font_family, img_type):
1199 default_user = """
1199 default_user = """
1200 <svg xmlns="http://www.w3.org/2000/svg"
1200 <svg xmlns="http://www.w3.org/2000/svg"
1201 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1201 version="1.1" x="0px" y="0px" width="{size}" height="{size}"
1202 viewBox="-15 -10 439.165 429.164"
1202 viewBox="-15 -10 439.165 429.164"
1203
1203
1204 xml:space="preserve"
1204 xml:space="preserve"
1205 style="background:{background};" >
1205 style="background:{background};" >
1206
1206
1207 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1207 <path d="M204.583,216.671c50.664,0,91.74-48.075,
1208 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1208 91.74-107.378c0-82.237-41.074-107.377-91.74-107.377
1209 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1209 c-50.668,0-91.74,25.14-91.74,107.377C112.844,
1210 168.596,153.916,216.671,
1210 168.596,153.916,216.671,
1211 204.583,216.671z" fill="{text_color}"/>
1211 204.583,216.671z" fill="{text_color}"/>
1212 <path d="M407.164,374.717L360.88,
1212 <path d="M407.164,374.717L360.88,
1213 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1213 270.454c-2.117-4.771-5.836-8.728-10.465-11.138l-71.83-37.392
1214 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1214 c-1.584-0.823-3.502-0.663-4.926,0.415c-20.316,
1215 15.366-44.203,23.488-69.076,23.488c-24.877,
1215 15.366-44.203,23.488-69.076,23.488c-24.877,
1216 0-48.762-8.122-69.078-23.488
1216 0-48.762-8.122-69.078-23.488
1217 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1217 c-1.428-1.078-3.346-1.238-4.93-0.415L58.75,
1218 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1218 259.316c-4.631,2.41-8.346,6.365-10.465,11.138L2.001,374.717
1219 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1219 c-3.191,7.188-2.537,15.412,1.75,22.005c4.285,
1220 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1220 6.592,11.537,10.526,19.4,10.526h362.861c7.863,0,15.117-3.936,
1221 19.402-10.527 C409.699,390.129,
1221 19.402-10.527 C409.699,390.129,
1222 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1222 410.355,381.902,407.164,374.717z" fill="{text_color}"/>
1223 </svg>""".format(
1223 </svg>""".format(
1224 size=self.size,
1224 size=self.size,
1225 background='#979797', # @grey4
1225 background='#979797', # @grey4
1226 text_color=self.text_color,
1226 text_color=self.text_color,
1227 font_family=font_family)
1227 font_family=font_family)
1228
1228
1229 return {
1229 return {
1230 "default_user": default_user
1230 "default_user": default_user
1231 }[img_type]
1231 }[img_type]
1232
1232
1233 def get_img_data(self, svg_type=None):
1233 def get_img_data(self, svg_type=None):
1234 """
1234 """
1235 Generates the SVG markup for the avatar image
1235 Generates the SVG markup for the avatar image
1236 """
1236 """
1237
1237
1238 font_family = ','.join([
1238 font_family = ','.join([
1239 'proximanovaregular',
1239 'proximanovaregular',
1240 'Proxima Nova Regular',
1240 'Proxima Nova Regular',
1241 'Proxima Nova',
1241 'Proxima Nova',
1242 'Arial',
1242 'Arial',
1243 'Lucida Grande',
1243 'Lucida Grande',
1244 'sans-serif'
1244 'sans-serif'
1245 ])
1245 ])
1246 if svg_type:
1246 if svg_type:
1247 return self.get_img_data_by_type(font_family, svg_type)
1247 return self.get_img_data_by_type(font_family, svg_type)
1248
1248
1249 initials = self.get_initials()
1249 initials = self.get_initials()
1250 img_data = """
1250 img_data = """
1251 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1251 <svg xmlns="http://www.w3.org/2000/svg" pointer-events="none"
1252 width="{size}" height="{size}"
1252 width="{size}" height="{size}"
1253 style="width: 100%; height: 100%; background-color: {background}"
1253 style="width: 100%; height: 100%; background-color: {background}"
1254 viewBox="0 0 {size} {size}">
1254 viewBox="0 0 {size} {size}">
1255 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1255 <text text-anchor="middle" y="50%" x="50%" dy="0.35em"
1256 pointer-events="auto" fill="{text_color}"
1256 pointer-events="auto" fill="{text_color}"
1257 font-family="{font_family}"
1257 font-family="{font_family}"
1258 style="font-weight: 400; font-size: {f_size}px;">{text}
1258 style="font-weight: 400; font-size: {f_size}px;">{text}
1259 </text>
1259 </text>
1260 </svg>""".format(
1260 </svg>""".format(
1261 size=self.size,
1261 size=self.size,
1262 f_size=self.size/1.85, # scale the text inside the box nicely
1262 f_size=self.size/1.85, # scale the text inside the box nicely
1263 background=self.background,
1263 background=self.background,
1264 text_color=self.text_color,
1264 text_color=self.text_color,
1265 text=initials.upper(),
1265 text=initials.upper(),
1266 font_family=font_family)
1266 font_family=font_family)
1267
1267
1268 return img_data
1268 return img_data
1269
1269
1270 def generate_svg(self, svg_type=None):
1270 def generate_svg(self, svg_type=None):
1271 img_data = self.get_img_data(svg_type)
1271 img_data = self.get_img_data(svg_type)
1272 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1272 return "data:image/svg+xml;base64,%s" % img_data.encode('base64')
1273
1273
1274
1274
1275 def initials_gravatar(email_address, first_name, last_name, size=30):
1275 def initials_gravatar(email_address, first_name, last_name, size=30):
1276 svg_type = None
1276 svg_type = None
1277 if email_address == User.DEFAULT_USER_EMAIL:
1277 if email_address == User.DEFAULT_USER_EMAIL:
1278 svg_type = 'default_user'
1278 svg_type = 'default_user'
1279 klass = InitialsGravatar(email_address, first_name, last_name, size)
1279 klass = InitialsGravatar(email_address, first_name, last_name, size)
1280 return klass.generate_svg(svg_type=svg_type)
1280 return klass.generate_svg(svg_type=svg_type)
1281
1281
1282
1282
1283 def gravatar_url(email_address, size=30, request=None):
1283 def gravatar_url(email_address, size=30, request=None):
1284 request = get_current_request()
1284 request = get_current_request()
1285 _use_gravatar = request.call_context.visual.use_gravatar
1285 _use_gravatar = request.call_context.visual.use_gravatar
1286 _gravatar_url = request.call_context.visual.gravatar_url
1286 _gravatar_url = request.call_context.visual.gravatar_url
1287
1287
1288 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1288 _gravatar_url = _gravatar_url or User.DEFAULT_GRAVATAR_URL
1289
1289
1290 email_address = email_address or User.DEFAULT_USER_EMAIL
1290 email_address = email_address or User.DEFAULT_USER_EMAIL
1291 if isinstance(email_address, unicode):
1291 if isinstance(email_address, unicode):
1292 # hashlib crashes on unicode items
1292 # hashlib crashes on unicode items
1293 email_address = safe_str(email_address)
1293 email_address = safe_str(email_address)
1294
1294
1295 # empty email or default user
1295 # empty email or default user
1296 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1296 if not email_address or email_address == User.DEFAULT_USER_EMAIL:
1297 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1297 return initials_gravatar(User.DEFAULT_USER_EMAIL, '', '', size=size)
1298
1298
1299 if _use_gravatar:
1299 if _use_gravatar:
1300 # TODO: Disuse pyramid thread locals. Think about another solution to
1300 # TODO: Disuse pyramid thread locals. Think about another solution to
1301 # get the host and schema here.
1301 # get the host and schema here.
1302 request = get_current_request()
1302 request = get_current_request()
1303 tmpl = safe_str(_gravatar_url)
1303 tmpl = safe_str(_gravatar_url)
1304 tmpl = tmpl.replace('{email}', email_address)\
1304 tmpl = tmpl.replace('{email}', email_address)\
1305 .replace('{md5email}', md5_safe(email_address.lower())) \
1305 .replace('{md5email}', md5_safe(email_address.lower())) \
1306 .replace('{netloc}', request.host)\
1306 .replace('{netloc}', request.host)\
1307 .replace('{scheme}', request.scheme)\
1307 .replace('{scheme}', request.scheme)\
1308 .replace('{size}', safe_str(size))
1308 .replace('{size}', safe_str(size))
1309 return tmpl
1309 return tmpl
1310 else:
1310 else:
1311 return initials_gravatar(email_address, '', '', size=size)
1311 return initials_gravatar(email_address, '', '', size=size)
1312
1312
1313
1313
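The `{email}` / `{md5email}` / `{netloc}` / `{scheme}` / `{size}` placeholder expansion performed by `gravatar_url` can be shown in isolation. A minimal sketch; the template URL below is only an illustrative value, not necessarily the configured default, and `expand_gravatar_tmpl` is an invented name.

    import hashlib


    def expand_gravatar_tmpl(tmpl, email, size,
                             netloc='code.example.com', scheme='https'):
        # md5 is computed over the lower-cased email; the other
        # placeholders are substituted verbatim
        md5email = hashlib.md5(email.lower().encode('utf8')).hexdigest()
        return (tmpl
                .replace('{email}', email)
                .replace('{md5email}', md5email)
                .replace('{netloc}', netloc)
                .replace('{scheme}', scheme)
                .replace('{size}', str(size)))


    tmpl = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'
    print(expand_gravatar_tmpl(tmpl, 'john.doe@example.com', 30))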
1314 class Page(_Page):
1314 class Page(_Page):
1315 """
1315 """
1316 Custom pager to match rendering style with paginator
1316 Custom pager to match rendering style with paginator
1317 """
1317 """
1318
1318
1319 def _get_pos(self, cur_page, max_page, items):
1319 def _get_pos(self, cur_page, max_page, items):
1320 edge = (items / 2) + 1
1320 edge = (items / 2) + 1
1321 if (cur_page <= edge):
1321 if (cur_page <= edge):
1322 radius = max(items / 2, items - cur_page)
1322 radius = max(items / 2, items - cur_page)
1323 elif (max_page - cur_page) < edge:
1323 elif (max_page - cur_page) < edge:
1324 radius = (items - 1) - (max_page - cur_page)
1324 radius = (items - 1) - (max_page - cur_page)
1325 else:
1325 else:
1326 radius = items / 2
1326 radius = items / 2
1327
1327
1328 left = max(1, (cur_page - (radius)))
1328 left = max(1, (cur_page - (radius)))
1329 right = min(max_page, cur_page + (radius))
1329 right = min(max_page, cur_page + (radius))
1330 return left, cur_page, right
1330 return left, cur_page, right
1331
1331
1332 def _range(self, regexp_match):
1332 def _range(self, regexp_match):
1333 """
1333 """
1334 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1334 Return range of linked pages (e.g. '1 2 [3] 4 5 6 7 8').
1335
1335
1336 Arguments:
1336 Arguments:
1337
1337
1338 regexp_match
1338 regexp_match
1339 A "re" (regular expressions) match object containing the
1339 A "re" (regular expressions) match object containing the
1340 radius of linked pages around the current page in
1340 radius of linked pages around the current page in
1341 regexp_match.group(1) as a string
1341 regexp_match.group(1) as a string
1342
1342
1343 This function is supposed to be called as a callable in
1343 This function is supposed to be called as a callable in
1344 re.sub.
1344 re.sub.
1345
1345
1346 """
1346 """
1347 radius = int(regexp_match.group(1))
1347 radius = int(regexp_match.group(1))
1348
1348
1349 # Compute the first and last page number within the radius
1349 # Compute the first and last page number within the radius
1350 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1350 # e.g. '1 .. 5 6 [7] 8 9 .. 12'
1351 # -> leftmost_page = 5
1351 # -> leftmost_page = 5
1352 # -> rightmost_page = 9
1352 # -> rightmost_page = 9
1353 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1353 leftmost_page, _cur, rightmost_page = self._get_pos(self.page,
1354 self.last_page,
1354 self.last_page,
1355 (radius * 2) + 1)
1355 (radius * 2) + 1)
1356 nav_items = []
1356 nav_items = []
1357
1357
1358 # Create a link to the first page (unless we are on the first page
1358 # Create a link to the first page (unless we are on the first page
1359 # or there would be no need to insert '..' spacers)
1359 # or there would be no need to insert '..' spacers)
1360 if self.page != self.first_page and self.first_page < leftmost_page:
1360 if self.page != self.first_page and self.first_page < leftmost_page:
1361 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1361 nav_items.append(self._pagerlink(self.first_page, self.first_page))
1362
1362
1363 # Insert dots if there are pages between the first page
1363 # Insert dots if there are pages between the first page
1364 # and the currently displayed page range
1364 # and the currently displayed page range
1365 if leftmost_page - self.first_page > 1:
1365 if leftmost_page - self.first_page > 1:
1366 # Wrap in a SPAN tag if nolink_attr is set
1366 # Wrap in a SPAN tag if nolink_attr is set
1367 text = '..'
1367 text = '..'
1368 if self.dotdot_attr:
1368 if self.dotdot_attr:
1369 text = HTML.span(c=text, **self.dotdot_attr)
1369 text = HTML.span(c=text, **self.dotdot_attr)
1370 nav_items.append(text)
1370 nav_items.append(text)
1371
1371
1372 for thispage in xrange(leftmost_page, rightmost_page + 1):
1372 for thispage in xrange(leftmost_page, rightmost_page + 1):
1373 # Highlight the current page number and do not use a link
1373 # Highlight the current page number and do not use a link
1374 if thispage == self.page:
1374 if thispage == self.page:
1375 text = '%s' % (thispage,)
1375 text = '%s' % (thispage,)
1376 # Wrap in a SPAN tag if nolink_attr is set
1376 # Wrap in a SPAN tag if nolink_attr is set
1377 if self.curpage_attr:
1377 if self.curpage_attr:
1378 text = HTML.span(c=text, **self.curpage_attr)
1378 text = HTML.span(c=text, **self.curpage_attr)
1379 nav_items.append(text)
1379 nav_items.append(text)
1380 # Otherwise create just a link to that page
1380 # Otherwise create just a link to that page
1381 else:
1381 else:
1382 text = '%s' % (thispage,)
1382 text = '%s' % (thispage,)
1383 nav_items.append(self._pagerlink(thispage, text))
1383 nav_items.append(self._pagerlink(thispage, text))
1384
1384
1385 # Insert dots if there are pages between the displayed
1385 # Insert dots if there are pages between the displayed
1386 # page numbers and the end of the page range
1386 # page numbers and the end of the page range
1387 if self.last_page - rightmost_page > 1:
1387 if self.last_page - rightmost_page > 1:
1388 text = '..'
1388 text = '..'
1389 # Wrap in a SPAN tag if nolink_attr is set
1389 # Wrap in a SPAN tag if nolink_attr is set
1390 if self.dotdot_attr:
1390 if self.dotdot_attr:
1391 text = HTML.span(c=text, **self.dotdot_attr)
1391 text = HTML.span(c=text, **self.dotdot_attr)
1392 nav_items.append(text)
1392 nav_items.append(text)
1393
1393
1394 # Create a link to the very last page (unless we are on the last
1394 # Create a link to the very last page (unless we are on the last
1395 # page or there would be no need to insert '..' spacers)
1395 # page or there would be no need to insert '..' spacers)
1396 if self.page != self.last_page and rightmost_page < self.last_page:
1396 if self.page != self.last_page and rightmost_page < self.last_page:
1397 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1397 nav_items.append(self._pagerlink(self.last_page, self.last_page))
1398
1398
1399 ## prerender links
1399 ## prerender links
1400 #_page_link = url.current()
1400 #_page_link = url.current()
1401 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1401 #nav_items.append(literal('<link rel="prerender" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1402 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1402 #nav_items.append(literal('<link rel="prefetch" href="%s?page=%s">' % (_page_link, str(int(self.page)+1))))
1403 return self.separator.join(nav_items)
1403 return self.separator.join(nav_items)
1404
1404
1405 def pager(self, format='~2~', page_param='page', partial_param='partial',
1405 def pager(self, format='~2~', page_param='page', partial_param='partial',
1406 show_if_single_page=False, separator=' ', onclick=None,
1406 show_if_single_page=False, separator=' ', onclick=None,
1407 symbol_first='<<', symbol_last='>>',
1407 symbol_first='<<', symbol_last='>>',
1408 symbol_previous='<', symbol_next='>',
1408 symbol_previous='<', symbol_next='>',
1409 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1409 link_attr={'class': 'pager_link', 'rel': 'prerender'},
1410 curpage_attr={'class': 'pager_curpage'},
1410 curpage_attr={'class': 'pager_curpage'},
1411 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1411 dotdot_attr={'class': 'pager_dotdot'}, **kwargs):
1412
1412
1413 self.curpage_attr = curpage_attr
1413 self.curpage_attr = curpage_attr
1414 self.separator = separator
1414 self.separator = separator
1415 self.pager_kwargs = kwargs
1415 self.pager_kwargs = kwargs
1416 self.page_param = page_param
1416 self.page_param = page_param
1417 self.partial_param = partial_param
1417 self.partial_param = partial_param
1418 self.onclick = onclick
1418 self.onclick = onclick
1419 self.link_attr = link_attr
1419 self.link_attr = link_attr
1420 self.dotdot_attr = dotdot_attr
1420 self.dotdot_attr = dotdot_attr
1421
1421
1422 # Don't show navigator if there is no more than one page
1422 # Don't show navigator if there is no more than one page
1423 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1423 if self.page_count == 0 or (self.page_count == 1 and not show_if_single_page):
1424 return ''
1424 return ''
1425
1425
1426 from string import Template
1426 from string import Template
1427 # Replace ~...~ in token format by range of pages
1427 # Replace ~...~ in token format by range of pages
1428 result = re.sub(r'~(\d+)~', self._range, format)
1428 result = re.sub(r'~(\d+)~', self._range, format)
1429
1429
1430 # Interpolate '%' variables
1430 # Interpolate '%' variables
1431 result = Template(result).safe_substitute({
1431 result = Template(result).safe_substitute({
1432 'first_page': self.first_page,
1432 'first_page': self.first_page,
1433 'last_page': self.last_page,
1433 'last_page': self.last_page,
1434 'page': self.page,
1434 'page': self.page,
1435 'page_count': self.page_count,
1435 'page_count': self.page_count,
1436 'items_per_page': self.items_per_page,
1436 'items_per_page': self.items_per_page,
1437 'first_item': self.first_item,
1437 'first_item': self.first_item,
1438 'last_item': self.last_item,
1438 'last_item': self.last_item,
1439 'item_count': self.item_count,
1439 'item_count': self.item_count,
1440 'link_first': self.page > self.first_page and \
1440 'link_first': self.page > self.first_page and \
1441 self._pagerlink(self.first_page, symbol_first) or '',
1441 self._pagerlink(self.first_page, symbol_first) or '',
1442 'link_last': self.page < self.last_page and \
1442 'link_last': self.page < self.last_page and \
1443 self._pagerlink(self.last_page, symbol_last) or '',
1443 self._pagerlink(self.last_page, symbol_last) or '',
1444 'link_previous': self.previous_page and \
1444 'link_previous': self.previous_page and \
1445 self._pagerlink(self.previous_page, symbol_previous) \
1445 self._pagerlink(self.previous_page, symbol_previous) \
1446 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1446 or HTML.span(symbol_previous, class_="pg-previous disabled"),
1447 'link_next': self.next_page and \
1447 'link_next': self.next_page and \
1448 self._pagerlink(self.next_page, symbol_next) \
1448 self._pagerlink(self.next_page, symbol_next) \
1449 or HTML.span(symbol_next, class_="pg-next disabled")
1449 or HTML.span(symbol_next, class_="pg-next disabled")
1450 })
1450 })
1451
1451
1452 return literal(result)
1452 return literal(result)
1453
1453
1454
1454
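The page-window calculation used by `_get_pos` / `_range` above can be illustrated standalone. A minimal sketch; `page_window` is an invented name, and `//` mirrors the integer division that `/` performs on ints under Python 2.

    def page_window(cur_page, max_page, items):
        # `items` is the total number of page links to show, e.g. items=5 -> '5 6 [7] 8 9'
        edge = (items // 2) + 1
        if cur_page <= edge:
            radius = max(items // 2, items - cur_page)
        elif (max_page - cur_page) < edge:
            radius = (items - 1) - (max_page - cur_page)
        else:
            radius = items // 2

        left = max(1, cur_page - radius)
        right = min(max_page, cur_page + radius)
        return left, right


    print(page_window(7, 12, items=5))   # (5, 9)  -> '1 .. 5 6 [7] 8 9 .. 12'
    print(page_window(1, 12, items=5))   # (1, 5)
    print(page_window(12, 12, items=5))  # (8, 12)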
1455 #==============================================================================
1455 #==============================================================================
1456 # REPO PAGER, PAGER FOR REPOSITORY
1456 # REPO PAGER, PAGER FOR REPOSITORY
1457 #==============================================================================
1457 #==============================================================================
1458 class RepoPage(Page):
1458 class RepoPage(Page):
1459
1459
1460 def __init__(self, collection, page=1, items_per_page=20,
1460 def __init__(self, collection, page=1, items_per_page=20,
1461 item_count=None, url=None, **kwargs):
1461 item_count=None, url=None, **kwargs):
1462
1462
1463 """Create a "RepoPage" instance. special pager for paging
1463 """Create a "RepoPage" instance. special pager for paging
1464 repository
1464 repository
1465 """
1465 """
1466 self._url_generator = url
1466 self._url_generator = url
1467
1467
1468 # Save the kwargs class-wide so they can be used in the pager() method
1468 # Save the kwargs class-wide so they can be used in the pager() method
1469 self.kwargs = kwargs
1469 self.kwargs = kwargs
1470
1470
1471 # Save a reference to the collection
1471 # Save a reference to the collection
1472 self.original_collection = collection
1472 self.original_collection = collection
1473
1473
1474 self.collection = collection
1474 self.collection = collection
1475
1475
1476 # The self.page is the number of the current page.
1476 # The self.page is the number of the current page.
1477 # The first page has the number 1!
1477 # The first page has the number 1!
1478 try:
1478 try:
1479 self.page = int(page) # make it int() if we get it as a string
1479 self.page = int(page) # make it int() if we get it as a string
1480 except (ValueError, TypeError):
1480 except (ValueError, TypeError):
1481 self.page = 1
1481 self.page = 1
1482
1482
1483 self.items_per_page = items_per_page
1483 self.items_per_page = items_per_page
1484
1484
1485 # Unless the user tells us how many items the collections has
1485 # Unless the user tells us how many items the collections has
1486 # we calculate that ourselves.
1486 # we calculate that ourselves.
1487 if item_count is not None:
1487 if item_count is not None:
1488 self.item_count = item_count
1488 self.item_count = item_count
1489 else:
1489 else:
1490 self.item_count = len(self.collection)
1490 self.item_count = len(self.collection)
1491
1491
1492 # Compute the number of the first and last available page
1492 # Compute the number of the first and last available page
1493 if self.item_count > 0:
1493 if self.item_count > 0:
1494 self.first_page = 1
1494 self.first_page = 1
1495 self.page_count = int(math.ceil(float(self.item_count) /
1495 self.page_count = int(math.ceil(float(self.item_count) /
1496 self.items_per_page))
1496 self.items_per_page))
1497 self.last_page = self.first_page + self.page_count - 1
1497 self.last_page = self.first_page + self.page_count - 1
1498
1498
1499 # Make sure that the requested page number is in the range of
1499 # Make sure that the requested page number is in the range of
1500 # valid pages
1500 # valid pages
1501 if self.page > self.last_page:
1501 if self.page > self.last_page:
1502 self.page = self.last_page
1502 self.page = self.last_page
1503 elif self.page < self.first_page:
1503 elif self.page < self.first_page:
1504 self.page = self.first_page
1504 self.page = self.first_page
1505
1505
1506 # Note: the number of items on this page can be less than
1506 # Note: the number of items on this page can be less than
1507 # items_per_page if the last page is not full
1507 # items_per_page if the last page is not full
1508 self.first_item = max(0, (self.item_count) - (self.page *
1508 self.first_item = max(0, (self.item_count) - (self.page *
1509 items_per_page))
1509 items_per_page))
1510 self.last_item = ((self.item_count - 1) - items_per_page *
1510 self.last_item = ((self.item_count - 1) - items_per_page *
1511 (self.page - 1))
1511 (self.page - 1))
1512
1512
1513 self.items = list(self.collection[self.first_item:self.last_item + 1])
1513 self.items = list(self.collection[self.first_item:self.last_item + 1])
1514
1514
1515 # Links to previous and next page
1515 # Links to previous and next page
1516 if self.page > self.first_page:
1516 if self.page > self.first_page:
1517 self.previous_page = self.page - 1
1517 self.previous_page = self.page - 1
1518 else:
1518 else:
1519 self.previous_page = None
1519 self.previous_page = None
1520
1520
1521 if self.page < self.last_page:
1521 if self.page < self.last_page:
1522 self.next_page = self.page + 1
1522 self.next_page = self.page + 1
1523 else:
1523 else:
1524 self.next_page = None
1524 self.next_page = None
1525
1525
1526 # No items available
1526 # No items available
1527 else:
1527 else:
1528 self.first_page = None
1528 self.first_page = None
1529 self.page_count = 0
1529 self.page_count = 0
1530 self.last_page = None
1530 self.last_page = None
1531 self.first_item = None
1531 self.first_item = None
1532 self.last_item = None
1532 self.last_item = None
1533 self.previous_page = None
1533 self.previous_page = None
1534 self.next_page = None
1534 self.next_page = None
1535 self.items = []
1535 self.items = []
1536
1536
1537 # This is a subclass of the 'list' type. Initialise the list now.
1537 # This is a subclass of the 'list' type. Initialise the list now.
1538 list.__init__(self, reversed(self.items))
1538 list.__init__(self, reversed(self.items))
1539
1539
1540
1540
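The slicing arithmetic in RepoPage.__init__ (page 1 shows the newest items, i.e. the tail slice of the collection, reversed) can be checked with a quick sketch; the concrete numbers below are only an example.

    import math

    item_count = 45
    items_per_page = 20
    page = 1

    page_count = int(math.ceil(float(item_count) / items_per_page))  # 3
    # page 1 takes the last `items_per_page` items of the collection
    first_item = max(0, item_count - (page * items_per_page))        # 25
    last_item = (item_count - 1) - items_per_page * (page - 1)       # 44

    collection = list(range(item_count))
    items = list(reversed(collection[first_item:last_item + 1]))

    print((page_count, first_item, last_item))  # (3, 25, 44)
    print(items[:3])                            # [44, 43, 42] - newest first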
1541 def breadcrumb_repo_link(repo):
1541 def breadcrumb_repo_link(repo):
1542 """
1542 """
1543 Makes a breadcrumbs path link to repo
1543 Makes a breadcrumbs path link to repo
1544
1544
1545 ex::
1545 ex::
1546 group >> subgroup >> repo
1546 group >> subgroup >> repo
1547
1547
1548 :param repo: a Repository instance
1548 :param repo: a Repository instance
1549 """
1549 """
1550
1550
1551 path = [
1551 path = [
1552 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1552 link_to(group.name, route_path('repo_group_home', repo_group_name=group.group_name))
1553 for group in repo.groups_with_parents
1553 for group in repo.groups_with_parents
1554 ] + [
1554 ] + [
1555 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1555 link_to(repo.just_name, route_path('repo_summary', repo_name=repo.repo_name))
1556 ]
1556 ]
1557
1557
1558 return literal(' &raquo; '.join(path))
1558 return literal(' &raquo; '.join(path))
1559
1559
1560
1560
1561 def format_byte_size_binary(file_size):
1561 def format_byte_size_binary(file_size):
1562 """
1562 """
1563 Formats file/folder sizes using binary (base-1024) units.
1563 Formats file/folder sizes using binary (base-1024) units.
1564 """
1564 """
1565 if file_size is None:
1565 if file_size is None:
1566 file_size = 0
1566 file_size = 0
1567
1567
1568 formatted_size = format_byte_size(file_size, binary=True)
1568 formatted_size = format_byte_size(file_size, binary=True)
1569 return formatted_size
1569 return formatted_size
1570
1570
1571
1571
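A standalone sketch of binary (base-1024, IEC) size formatting in the spirit of `format_byte_size(..., binary=True)`; the exact output format of the real helper may differ, and `human_size_binary` is an invented name.

    def human_size_binary(num_bytes):
        # walk the IEC units, dividing by 1024 at each step
        num = float(num_bytes or 0)
        for unit in ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB']:
            if abs(num) < 1024.0:
                return '%3.1f %s' % (num, unit)
            num /= 1024.0
        return '%3.1f %s' % (num, 'EiB')


    print(human_size_binary(0))              # 0.0 B
    print(human_size_binary(2048))           # 2.0 KiB
    print(human_size_binary(5 * 1024 ** 2))  # 5.0 MiB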
1572 def urlify_text(text_, safe=True):
1572 def urlify_text(text_, safe=True):
1573 """
1573 """
1574 Extract urls from text and make html links out of them
1574 Extract urls from text and make html links out of them
1575
1575
1576 :param text_:
1576 :param text_:
1577 """
1577 """
1578
1578
1579 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1579 url_pat = re.compile(r'''(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'''
1580 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1580 '''|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)''')
1581
1581
1582 def url_func(match_obj):
1582 def url_func(match_obj):
1583 url_full = match_obj.groups()[0]
1583 url_full = match_obj.groups()[0]
1584 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1584 return '<a href="%(url)s">%(url)s</a>' % ({'url': url_full})
1585 _newtext = url_pat.sub(url_func, text_)
1585 _newtext = url_pat.sub(url_func, text_)
1586 if safe:
1586 if safe:
1587 return literal(_newtext)
1587 return literal(_newtext)
1588 return _newtext
1588 return _newtext
1589
1589
1590
1590
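A quick usage sketch of the URL-extraction substitution above, using plain `re.sub` with the same pattern but without the `literal()` wrapper.

    import re

    url_pat = re.compile(r'(http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@#.&+]'
                         r'|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)')


    def linkify(text):
        # wrap every matched url in an anchor tag
        return url_pat.sub(
            lambda m: '<a href="%(url)s">%(url)s</a>' % {'url': m.group(1)}, text)


    print(linkify('docs live at https://example.com/docs now'))
    # docs live at <a href="https://example.com/docs">https://example.com/docs</a> now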
1591 def urlify_commits(text_, repository):
1591 def urlify_commits(text_, repository):
1592 """
1592 """
1593 Extract commit ids from text and make links from them
1593 Extract commit ids from text and make links from them
1594
1594
1595 :param text_:
1595 :param text_:
1596 :param repository: repo name to build the URL with
1596 :param repository: repo name to build the URL with
1597 """
1597 """
1598
1598
1599 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1599 URL_PAT = re.compile(r'(^|\s)([0-9a-fA-F]{12,40})($|\s)')
1600
1600
1601 def url_func(match_obj):
1601 def url_func(match_obj):
1602 commit_id = match_obj.groups()[1]
1602 commit_id = match_obj.groups()[1]
1603 pref = match_obj.groups()[0]
1603 pref = match_obj.groups()[0]
1604 suf = match_obj.groups()[2]
1604 suf = match_obj.groups()[2]
1605
1605
1606 tmpl = (
1606 tmpl = (
1607 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1607 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1608 '%(commit_id)s</a>%(suf)s'
1608 '%(commit_id)s</a>%(suf)s'
1609 )
1609 )
1610 return tmpl % {
1610 return tmpl % {
1611 'pref': pref,
1611 'pref': pref,
1612 'cls': 'revision-link',
1612 'cls': 'revision-link',
1613 'url': route_url('repo_commit', repo_name=repository,
1613 'url': route_url('repo_commit', repo_name=repository,
1614 commit_id=commit_id),
1614 commit_id=commit_id),
1615 'commit_id': commit_id,
1615 'commit_id': commit_id,
1616 'suf': suf
1616 'suf': suf
1617 }
1617 }
1618
1618
1619 newtext = URL_PAT.sub(url_func, text_)
1619 newtext = URL_PAT.sub(url_func, text_)
1620
1620
1621 return newtext
1621 return newtext
1622
1622
1623
1623
1624 def _process_url_func(match_obj, repo_name, uid, entry,
1624 def _process_url_func(match_obj, repo_name, uid, entry,
1625 return_raw_data=False, link_format='html'):
1625 return_raw_data=False, link_format='html'):
1626 pref = ''
1626 pref = ''
1627 if match_obj.group().startswith(' '):
1627 if match_obj.group().startswith(' '):
1628 pref = ' '
1628 pref = ' '
1629
1629
1630 issue_id = ''.join(match_obj.groups())
1630 issue_id = ''.join(match_obj.groups())
1631
1631
1632 if link_format == 'html':
1632 if link_format == 'html':
1633 tmpl = (
1633 tmpl = (
1634 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1634 '%(pref)s<a class="%(cls)s" href="%(url)s">'
1635 '%(issue-prefix)s%(id-repr)s'
1635 '%(issue-prefix)s%(id-repr)s'
1636 '</a>')
1636 '</a>')
1637 elif link_format == 'rst':
1637 elif link_format == 'rst':
1638 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1638 tmpl = '`%(issue-prefix)s%(id-repr)s <%(url)s>`_'
1639 elif link_format == 'markdown':
1639 elif link_format == 'markdown':
1640 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1640 tmpl = '[%(issue-prefix)s%(id-repr)s](%(url)s)'
1641 else:
1641 else:
1642 raise ValueError('Bad link_format:{}'.format(link_format))
1642 raise ValueError('Bad link_format:{}'.format(link_format))
1643
1643
1644 (repo_name_cleaned,
1644 (repo_name_cleaned,
1645 parent_group_name) = RepoGroupModel().\
1645 parent_group_name) = RepoGroupModel().\
1646 _get_group_name_and_parent(repo_name)
1646 _get_group_name_and_parent(repo_name)
1647
1647
1648 # variables replacement
1648 # variables replacement
1649 named_vars = {
1649 named_vars = {
1650 'id': issue_id,
1650 'id': issue_id,
1651 'repo': repo_name,
1651 'repo': repo_name,
1652 'repo_name': repo_name_cleaned,
1652 'repo_name': repo_name_cleaned,
1653 'group_name': parent_group_name
1653 'group_name': parent_group_name
1654 }
1654 }
1655 # named regex variables
1655 # named regex variables
1656 named_vars.update(match_obj.groupdict())
1656 named_vars.update(match_obj.groupdict())
1657 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1657 _url = string.Template(entry['url']).safe_substitute(**named_vars)
1658
1658
1659 data = {
1659 data = {
1660 'pref': pref,
1660 'pref': pref,
1661 'cls': 'issue-tracker-link',
1661 'cls': 'issue-tracker-link',
1662 'url': _url,
1662 'url': _url,
1663 'id-repr': issue_id,
1663 'id-repr': issue_id,
1664 'issue-prefix': entry['pref'],
1664 'issue-prefix': entry['pref'],
1665 'serv': entry['url'],
1665 'serv': entry['url'],
1666 }
1666 }
1667 if return_raw_data:
1667 if return_raw_data:
1668 return {
1668 return {
1669 'id': issue_id,
1669 'id': issue_id,
1670 'url': _url
1670 'url': _url
1671 }
1671 }
1672 return tmpl % data
1672 return tmpl % data
1673
1673
1674
1674
1675 def get_active_pattern_entries(repo_name):
1675 def get_active_pattern_entries(repo_name):
1676 repo = None
1676 repo = None
1677 if repo_name:
1677 if repo_name:
1678 # Retrieve the repo here so an invalid repo_name does not explode in
1678 # Retrieve the repo here so an invalid repo_name does not explode in
1679 # IssueTrackerSettingsModel; the (possibly invalid) name is still passed further down
1679 # IssueTrackerSettingsModel; the (possibly invalid) name is still passed further down
1680 repo = Repository.get_by_repo_name(repo_name, cache=True)
1680 repo = Repository.get_by_repo_name(repo_name, cache=True)
1681
1681
1682 settings_model = IssueTrackerSettingsModel(repo=repo)
1682 settings_model = IssueTrackerSettingsModel(repo=repo)
1683 active_entries = settings_model.get_settings(cache=True)
1683 active_entries = settings_model.get_settings(cache=True)
1684 return active_entries
1684 return active_entries
1685
1685
1686
1686
1687 def process_patterns(text_string, repo_name, link_format='html',
1687 def process_patterns(text_string, repo_name, link_format='html',
1688 active_entries=None):
1688 active_entries=None):
1689
1689
1690 allowed_formats = ['html', 'rst', 'markdown']
1690 allowed_formats = ['html', 'rst', 'markdown']
1691 if link_format not in allowed_formats:
1691 if link_format not in allowed_formats:
1692 raise ValueError('Link format can be only one of:{} got {}'.format(
1692 raise ValueError('Link format can be only one of:{} got {}'.format(
1693 allowed_formats, link_format))
1693 allowed_formats, link_format))
1694
1694
1695 active_entries = active_entries or get_active_pattern_entries(repo_name)
1695 active_entries = active_entries or get_active_pattern_entries(repo_name)
1696 issues_data = []
1696 issues_data = []
1697 newtext = text_string
1697 newtext = text_string
1698
1698
1699 for uid, entry in active_entries.items():
1699 for uid, entry in active_entries.items():
1700 log.debug('found issue tracker entry with uid %s' % (uid,))
1700 log.debug('found issue tracker entry with uid %s' % (uid,))
1701
1701
1702 if not (entry['pat'] and entry['url']):
1702 if not (entry['pat'] and entry['url']):
1703 log.debug('skipping due to missing data')
1703 log.debug('skipping due to missing data')
1704 continue
1704 continue
1705
1705
1706 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1706 log.debug('issue tracker entry: uid: `%s` PAT:%s URL:%s PREFIX:%s'
1707 % (uid, entry['pat'], entry['url'], entry['pref']))
1707 % (uid, entry['pat'], entry['url'], entry['pref']))
1708
1708
1709 try:
1709 try:
1710 pattern = re.compile(r'%s' % entry['pat'])
1710 pattern = re.compile(r'%s' % entry['pat'])
1711 except re.error:
1711 except re.error:
1712 log.exception(
1712 log.exception(
1713 'issue tracker pattern: `%s` failed to compile',
1713 'issue tracker pattern: `%s` failed to compile',
1714 entry['pat'])
1714 entry['pat'])
1715 continue
1715 continue
1716
1716
1717 data_func = partial(
1717 data_func = partial(
1718 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1718 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1719 return_raw_data=True)
1719 return_raw_data=True)
1720
1720
1721 for match_obj in pattern.finditer(text_string):
1721 for match_obj in pattern.finditer(text_string):
1722 issues_data.append(data_func(match_obj))
1722 issues_data.append(data_func(match_obj))
1723
1723
1724 url_func = partial(
1724 url_func = partial(
1725 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1725 _process_url_func, repo_name=repo_name, entry=entry, uid=uid,
1726 link_format=link_format)
1726 link_format=link_format)
1727
1727
1728 newtext = pattern.sub(url_func, newtext)
1728 newtext = pattern.sub(url_func, newtext)
1729 log.debug('processed prefix:uid `%s`' % (uid,))
1729 log.debug('processed prefix:uid `%s`' % (uid,))
1730
1730
1731 return newtext, issues_data
1731 return newtext, issues_data
1732
1732
1733
1733
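How `pat`, `url`, `pref` and the named regex groups interact can be demonstrated with a tiny self-contained entry. A minimal sketch of the substitution step only; the entry values below are made up, and real entries come from the issue-tracker settings.

    import re
    import string

    # a made-up issue tracker entry; real entries come from the settings model
    entry = {
        'pat': r'#(?P<issue_id>\d+)',
        'url': 'https://tracker.example.com/${repo}/issue/${issue_id}',
        'pref': '#',
    }
    repo_name = 'group/myrepo'
    text = 'fixes #42 and touches #7'

    pattern = re.compile(entry['pat'])


    def url_func(match_obj):
        issue_id = ''.join(match_obj.groups())
        # named variables available for ${...} substitution in the url template
        named_vars = {'id': issue_id, 'repo': repo_name}
        named_vars.update(match_obj.groupdict())
        url = string.Template(entry['url']).safe_substitute(**named_vars)
        return '<a class="issue-tracker-link" href="%s">%s%s</a>' % (
            url, entry['pref'], issue_id)


    print(pattern.sub(url_func, text))
    # fixes <a class="issue-tracker-link" href="https://tracker.example.com/group/myrepo/issue/42">#42</a> ...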
1734 def urlify_commit_message(commit_text, repository=None,
1734 def urlify_commit_message(commit_text, repository=None,
1735 active_pattern_entries=None):
1735 active_pattern_entries=None):
1736 """
1736 """
1737 Parses given text message and makes proper links.
1737 Parses given text message and makes proper links.
1738 Issues are linked to the configured issue tracker, and the rest become commit links.
1738 Issues are linked to the configured issue tracker, and the rest become commit links.
1739
1739
1740 :param commit_text:
1740 :param commit_text:
1741 :param repository:
1741 :param repository:
1742 """
1742 """
1743 def escaper(string):
1743 def escaper(string):
1744 return string.replace('<', '&lt;').replace('>', '&gt;')
1744 return string.replace('<', '&lt;').replace('>', '&gt;')
1745
1745
1746 newtext = escaper(commit_text)
1746 newtext = escaper(commit_text)
1747
1747
1748 # extract http/https links and make them real urls
1748 # extract http/https links and make them real urls
1749 newtext = urlify_text(newtext, safe=False)
1749 newtext = urlify_text(newtext, safe=False)
1750
1750
1751 # urlify commits - extract commit ids and make link out of them, if we have
1751 # urlify commits - extract commit ids and make link out of them, if we have
1752 # the scope of repository present.
1752 # the scope of repository present.
1753 if repository:
1753 if repository:
1754 newtext = urlify_commits(newtext, repository)
1754 newtext = urlify_commits(newtext, repository)
1755
1755
1756 # process issue tracker patterns
1756 # process issue tracker patterns
1757 newtext, issues = process_patterns(newtext, repository or '',
1757 newtext, issues = process_patterns(newtext, repository or '',
1758 active_entries=active_pattern_entries)
1758 active_entries=active_pattern_entries)
1759
1759
1760 return literal(newtext)
1760 return literal(newtext)
1761
1761
1762
1762
1763 def render_binary(repo_name, file_obj):
1763 def render_binary(repo_name, file_obj):
1764 """
1764 """
1765 Choose how to render a binary file
1765 Choose how to render a binary file
1766 """
1766 """
1767 filename = file_obj.name
1767 filename = file_obj.name
1768
1768
1769 # images
1769 # images
1770 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1770 for ext in ['*.png', '*.jpg', '*.ico', '*.gif']:
1771 if fnmatch.fnmatch(filename, pat=ext):
1771 if fnmatch.fnmatch(filename, pat=ext):
1772 alt = filename
1772 alt = filename
1773 src = route_path(
1773 src = route_path(
1774 'repo_file_raw', repo_name=repo_name,
1774 'repo_file_raw', repo_name=repo_name,
1775 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1775 commit_id=file_obj.commit.raw_id, f_path=file_obj.path)
1776 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1776 return literal('<img class="rendered-binary" alt="{}" src="{}">'.format(alt, src))
1777
1777
1778
1778
1779 def renderer_from_filename(filename, exclude=None):
1779 def renderer_from_filename(filename, exclude=None):
1780 """
1780 """
1781 Choose a renderer based on filename; this works only for text-based files
1781 Choose a renderer based on filename; this works only for text-based files
1782 """
1782 """
1783
1783
1784 # ipython
1784 # ipython
1785 for ext in ['*.ipynb']:
1785 for ext in ['*.ipynb']:
1786 if fnmatch.fnmatch(filename, pat=ext):
1786 if fnmatch.fnmatch(filename, pat=ext):
1787 return 'jupyter'
1787 return 'jupyter'
1788
1788
1789 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1789 is_markup = MarkupRenderer.renderer_from_filename(filename, exclude=exclude)
1790 if is_markup:
1790 if is_markup:
1791 return is_markup
1791 return is_markup
1792 return None
1792 return None
1793
1793
1794
1794
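A sketch of the fnmatch-based renderer dispatch used above; the pattern-to-renderer mapping below is illustrative only, and the real markup detection lives in MarkupRenderer.

    import fnmatch

    # illustrative pattern -> renderer mapping
    RENDERER_BY_PATTERN = [
        ('*.ipynb', 'jupyter'),
        ('*.md', 'markdown'),
        ('*.rst', 'rst'),
    ]


    def pick_renderer(filename):
        for pat, renderer in RENDERER_BY_PATTERN:
            if fnmatch.fnmatch(filename, pat):
                return renderer
        return None  # None means: just show the file source


    print(pick_renderer('notebook.ipynb'))  # jupyter
    print(pick_renderer('README.md'))       # markdown
    print(pick_renderer('main.c'))          # None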
1795 def render(source, renderer='rst', mentions=False, relative_urls=None,
1795 def render(source, renderer='rst', mentions=False, relative_urls=None,
1796 repo_name=None):
1796 repo_name=None):
1797
1797
1798 def maybe_convert_relative_links(html_source):
1798 def maybe_convert_relative_links(html_source):
1799 if relative_urls:
1799 if relative_urls:
1800 return relative_links(html_source, relative_urls)
1800 return relative_links(html_source, relative_urls)
1801 return html_source
1801 return html_source
1802
1802
1803 if renderer == 'rst':
1803 if renderer == 'rst':
1804 if repo_name:
1804 if repo_name:
1805 # process patterns on comments if we pass in repo name
1805 # process patterns on comments if we pass in repo name
1806 source, issues = process_patterns(
1806 source, issues = process_patterns(
1807 source, repo_name, link_format='rst')
1807 source, repo_name, link_format='rst')
1808
1808
1809 return literal(
1809 return literal(
1810 '<div class="rst-block">%s</div>' %
1810 '<div class="rst-block">%s</div>' %
1811 maybe_convert_relative_links(
1811 maybe_convert_relative_links(
1812 MarkupRenderer.rst(source, mentions=mentions)))
1812 MarkupRenderer.rst(source, mentions=mentions)))
1813 elif renderer == 'markdown':
1813 elif renderer == 'markdown':
1814 if repo_name:
1814 if repo_name:
1815 # process patterns on comments if we pass in repo name
1815 # process patterns on comments if we pass in repo name
1816 source, issues = process_patterns(
1816 source, issues = process_patterns(
1817 source, repo_name, link_format='markdown')
1817 source, repo_name, link_format='markdown')
1818
1818
1819 return literal(
1819 return literal(
1820 '<div class="markdown-block">%s</div>' %
1820 '<div class="markdown-block">%s</div>' %
1821 maybe_convert_relative_links(
1821 maybe_convert_relative_links(
1822 MarkupRenderer.markdown(source, flavored=True,
1822 MarkupRenderer.markdown(source, flavored=True,
1823 mentions=mentions)))
1823 mentions=mentions)))
1824 elif renderer == 'jupyter':
1824 elif renderer == 'jupyter':
1825 return literal(
1825 return literal(
1826 '<div class="ipynb">%s</div>' %
1826 '<div class="ipynb">%s</div>' %
1827 maybe_convert_relative_links(
1827 maybe_convert_relative_links(
1828 MarkupRenderer.jupyter(source)))
1828 MarkupRenderer.jupyter(source)))
1829
1829
1830 # None means just show the file-source
1830 # None means just show the file-source
1831 return None
1831 return None
1832
1832
1833
1833
1834 def commit_status(repo, commit_id):
1834 def commit_status(repo, commit_id):
1835 return ChangesetStatusModel().get_status(repo, commit_id)
1835 return ChangesetStatusModel().get_status(repo, commit_id)
1836
1836
1837
1837
1838 def commit_status_lbl(commit_status):
1838 def commit_status_lbl(commit_status):
1839 return dict(ChangesetStatus.STATUSES).get(commit_status)
1839 return dict(ChangesetStatus.STATUSES).get(commit_status)
1840
1840
1841
1841
1842 def commit_time(repo_name, commit_id):
1842 def commit_time(repo_name, commit_id):
1843 repo = Repository.get_by_repo_name(repo_name)
1843 repo = Repository.get_by_repo_name(repo_name)
1844 commit = repo.get_commit(commit_id=commit_id)
1844 commit = repo.get_commit(commit_id=commit_id)
1845 return commit.date
1845 return commit.date
1846
1846
1847
1847
1848 def get_permission_name(key):
1848 def get_permission_name(key):
1849 return dict(Permission.PERMS).get(key)
1849 return dict(Permission.PERMS).get(key)
1850
1850
1851
1851
1852 def journal_filter_help(request):
1852 def journal_filter_help(request):
1853 _ = request.translate
1853 _ = request.translate
1854
1854
1855 return _(
1855 return _(
1856 'Example filter terms:\n' +
1856 'Example filter terms:\n' +
1857 ' repository:vcs\n' +
1857 ' repository:vcs\n' +
1858 ' username:marcin\n' +
1858 ' username:marcin\n' +
1859 ' username:(NOT marcin)\n' +
1859 ' username:(NOT marcin)\n' +
1860 ' action:*push*\n' +
1860 ' action:*push*\n' +
1861 ' ip:127.0.0.1\n' +
1861 ' ip:127.0.0.1\n' +
1862 ' date:20120101\n' +
1862 ' date:20120101\n' +
1863 ' date:[20120101100000 TO 20120102]\n' +
1863 ' date:[20120101100000 TO 20120102]\n' +
1864 '\n' +
1864 '\n' +
1865 'Generate wildcards using \'*\' character:\n' +
1865 'Generate wildcards using \'*\' character:\n' +
1866 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1866 ' "repository:vcs*" - search everything starting with \'vcs\'\n' +
1867 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1867 ' "repository:*vcs*" - search for repository containing \'vcs\'\n' +
1868 '\n' +
1868 '\n' +
1869 'Optional AND / OR operators in queries\n' +
1869 'Optional AND / OR operators in queries\n' +
1870 ' "repository:vcs OR repository:test"\n' +
1870 ' "repository:vcs OR repository:test"\n' +
1871 ' "username:test AND repository:test*"\n'
1871 ' "username:test AND repository:test*"\n'
1872 )
1872 )
1873
1873
1874
1874
1875 def search_filter_help(searcher, request):
1875 def search_filter_help(searcher, request):
1876 _ = request.translate
1876 _ = request.translate
1877
1877
1878 terms = ''
1878 terms = ''
1879 return _(
1879 return _(
1880 'Example filter terms for `{searcher}` search:\n' +
1880 'Example filter terms for `{searcher}` search:\n' +
1881 '{terms}\n' +
1881 '{terms}\n' +
1882 'Generate wildcards using \'*\' character:\n' +
1882 'Generate wildcards using \'*\' character:\n' +
1883 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1883 ' "repo_name:vcs*" - search everything starting with \'vcs\'\n' +
1884 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1884 ' "repo_name:*vcs*" - search for repository containing \'vcs\'\n' +
1885 '\n' +
1885 '\n' +
1886 'Optional AND / OR operators in queries\n' +
1886 'Optional AND / OR operators in queries\n' +
1887 ' "repo_name:vcs OR repo_name:test"\n' +
1887 ' "repo_name:vcs OR repo_name:test"\n' +
1888 ' "owner:test AND repo_name:test*"\n' +
1888 ' "owner:test AND repo_name:test*"\n' +
1889 'More: {search_doc}'
1889 'More: {search_doc}'
1890 ).format(searcher=searcher.name,
1890 ).format(searcher=searcher.name,
1891 terms=terms, search_doc=searcher.query_lang_doc)
1891 terms=terms, search_doc=searcher.query_lang_doc)
1892
1892
1893
1893
1894 def not_mapped_error(repo_name):
1894 def not_mapped_error(repo_name):
1895 from rhodecode.translation import _
1895 from rhodecode.translation import _
1896 flash(_('%s repository is not mapped to db; perhaps'
1896 flash(_('%s repository is not mapped to db; perhaps'
1897 ' it was created or renamed from the filesystem.'
1897 ' it was created or renamed from the filesystem.'
1898 ' Please run the application again'
1898 ' Please run the application again'
1899 ' in order to rescan repositories.') % repo_name, category='error')
1899 ' in order to rescan repositories.') % repo_name, category='error')
1900
1900
1901
1901
1902 def ip_range(ip_addr):
1902 def ip_range(ip_addr):
1903 from rhodecode.model.db import UserIpMap
1903 from rhodecode.model.db import UserIpMap
1904 s, e = UserIpMap._get_ip_range(ip_addr)
1904 s, e = UserIpMap._get_ip_range(ip_addr)
1905 return '%s - %s' % (s, e)
1905 return '%s - %s' % (s, e)
1906
1906
1907
1907
1908 def form(url, method='post', needs_csrf_token=True, **attrs):
1908 def form(url, method='post', needs_csrf_token=True, **attrs):
1909 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1909 """Wrapper around webhelpers.tags.form to prevent CSRF attacks."""
1910 if method.lower() != 'get' and needs_csrf_token:
1910 if method.lower() != 'get' and needs_csrf_token:
1911 raise Exception(
1911 raise Exception(
1912 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1912 'Forms to POST/PUT/DELETE endpoints should have (in general) a ' +
1913 'CSRF token. If the endpoint does not require such token you can ' +
1913 'CSRF token. If the endpoint does not require such token you can ' +
1914 'explicitly set the parameter needs_csrf_token to false.')
1914 'explicitly set the parameter needs_csrf_token to false.')
1915
1915
1916 return wh_form(url, method=method, **attrs)
1916 return wh_form(url, method=method, **attrs)
1917
1917
1918
1918
1919 def secure_form(form_url, method="POST", multipart=False, **attrs):
1919 def secure_form(form_url, method="POST", multipart=False, **attrs):
1920 """Start a form tag that points the action to an url. This
1920 """Start a form tag that points the action to an url. This
1921 form tag will also include the hidden field containing
1921 form tag will also include the hidden field containing
1922 the auth token.
1922 the auth token.
1923
1923
1924 The url options should be given either as a string, or as a
1924 The url options should be given either as a string, or as a
1925 ``url()`` function. The method for the form defaults to POST.
1925 ``url()`` function. The method for the form defaults to POST.
1926
1926
1927 Options:
1927 Options:
1928
1928
1929 ``multipart``
1929 ``multipart``
1930 If set to True, the enctype is set to "multipart/form-data".
1930 If set to True, the enctype is set to "multipart/form-data".
1931 ``method``
1931 ``method``
1932 The method to use when submitting the form, usually either
1932 The method to use when submitting the form, usually either
1933 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1933 "GET" or "POST". If "PUT", "DELETE", or another verb is used, a
1934 hidden input with name _method is added to simulate the verb
1934 hidden input with name _method is added to simulate the verb
1935 over POST.
1935 over POST.
1936
1936
1937 """
1937 """
1938 from webhelpers.pylonslib.secure_form import insecure_form
1938 from webhelpers.pylonslib.secure_form import insecure_form
1939
1939
1940 if 'request' in attrs:
1940 if 'request' in attrs:
1941 session = attrs['request'].session
1941 session = attrs['request'].session
1942 del attrs['request']
1942 del attrs['request']
1943 else:
1943 else:
1944 raise ValueError(
1944 raise ValueError(
1945 'Calling this form requires request= to be passed as argument')
1945 'Calling this form requires request= to be passed as argument')
1946
1946
1947 form = insecure_form(form_url, method, multipart, **attrs)
1947 form = insecure_form(form_url, method, multipart, **attrs)
1948 token = literal(
1948 token = literal(
1949 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1949 '<input type="hidden" id="{}" name="{}" value="{}">'.format(
1950 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1950 csrf_token_key, csrf_token_key, get_csrf_token(session)))
1951
1951
1952 return literal("%s\n%s" % (form, token))
1952 return literal("%s\n%s" % (form, token))
1953
1953
1954
1954
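The essential effect of `secure_form` is appending a hidden CSRF field after the opening form tag. A stripped-down sketch; the field name and token value are placeholders, while the real ones come from `csrf_token_key` and `get_csrf_token(session)`.

    # placeholder values; the real ones come from the session / configuration
    form_tag = '<form action="/myrepo/settings" method="POST">'
    csrf_token_key = 'csrf_token'
    token_value = 'd41d8cd98f00b204e9800998ecf8427e'

    token_input = '<input type="hidden" id="{0}" name="{0}" value="{1}">'.format(
        csrf_token_key, token_value)

    print('%s\n%s' % (form_tag, token_input))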
1955 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1955 def dropdownmenu(name, selected, options, enable_filter=False, **attrs):
1956 select_html = select(name, selected, options, **attrs)
1956 select_html = select(name, selected, options, **attrs)
1957 select2 = """
1957 select2 = """
1958 <script>
1958 <script>
1959 $(document).ready(function() {
1959 $(document).ready(function() {
1960 $('#%s').select2({
1960 $('#%s').select2({
1961 containerCssClass: 'drop-menu',
1961 containerCssClass: 'drop-menu',
1962 dropdownCssClass: 'drop-menu-dropdown',
1962 dropdownCssClass: 'drop-menu-dropdown',
1963 dropdownAutoWidth: true%s
1963 dropdownAutoWidth: true%s
1964 });
1964 });
1965 });
1965 });
1966 </script>
1966 </script>
1967 """
1967 """
1968 filter_option = """,
1968 filter_option = """,
1969 minimumResultsForSearch: -1
1969 minimumResultsForSearch: -1
1970 """
1970 """
1971 input_id = attrs.get('id') or name
1971 input_id = attrs.get('id') or name
1972 filter_enabled = "" if enable_filter else filter_option
1972 filter_enabled = "" if enable_filter else filter_option
1973 select_script = literal(select2 % (input_id, filter_enabled))
1973 select_script = literal(select2 % (input_id, filter_enabled))
1974
1974
1975 return literal(select_html+select_script)
1975 return literal(select_html+select_script)
1976
1976
1977
1977
1978 def get_visual_attr(tmpl_context_var, attr_name):
1978 def get_visual_attr(tmpl_context_var, attr_name):
1979 """
1979 """
1980 A safe way to get an attribute from the `visual` variable of the template context
1980 A safe way to get an attribute from the `visual` variable of the template context
1981
1981
1982 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1982 :param tmpl_context_var: instance of tmpl_context, usually present as `c`
1983 :param attr_name: name of the attribute we fetch from the c.visual
1983 :param attr_name: name of the attribute we fetch from the c.visual
1984 """
1984 """
1985 visual = getattr(tmpl_context_var, 'visual', None)
1985 visual = getattr(tmpl_context_var, 'visual', None)
1986 if not visual:
1986 if not visual:
1987 return
1987 return
1988 else:
1988 else:
1989 return getattr(visual, attr_name, None)
1989 return getattr(visual, attr_name, None)
1990
1990
1991
1991
1992 def get_last_path_part(file_node):
1992 def get_last_path_part(file_node):
1993 if not file_node.path:
1993 if not file_node.path:
1994 return u''
1994 return u''
1995
1995
1996 path = safe_unicode(file_node.path.split('/')[-1])
1996 path = safe_unicode(file_node.path.split('/')[-1])
1997 return u'../' + path
1997 return u'../' + path
1998
1998
1999
1999
2000 def route_url(*args, **kwargs):
2000 def route_url(*args, **kwargs):
2001 """
2001 """
2002 Wrapper around Pyramid's `route_url` (fully qualified url) function.
2002 Wrapper around Pyramid's `route_url` (fully qualified url) function.
2003 """
2003 """
2004 req = get_current_request()
2004 req = get_current_request()
2005 return req.route_url(*args, **kwargs)
2005 return req.route_url(*args, **kwargs)
2006
2006
2007
2007
2008 def route_path(*args, **kwargs):
2008 def route_path(*args, **kwargs):
2009 """
2009 """
2010 Wrapper around Pyramid's `route_path` function.
2010 Wrapper around Pyramid's `route_path` function.
2011 """
2011 """
2012 req = get_current_request()
2012 req = get_current_request()
2013 return req.route_path(*args, **kwargs)
2013 return req.route_path(*args, **kwargs)
2014
2014
2015
2015
2016 def route_path_or_none(*args, **kwargs):
2016 def route_path_or_none(*args, **kwargs):
2017 try:
2017 try:
2018 return route_path(*args, **kwargs)
2018 return route_path(*args, **kwargs)
2019 except KeyError:
2019 except KeyError:
2020 return None
2020 return None
2021
2021
2022
2022
2023 def current_route_path(request, **kw):
2023 def current_route_path(request, **kw):
2024 new_args = request.GET.mixed()
2024 new_args = request.GET.mixed()
2025 new_args.update(kw)
2025 new_args.update(kw)
2026 return request.current_route_path(_query=new_args)
2026 return request.current_route_path(_query=new_args)
2027
2027
2028
2028
2029 def api_call_example(method, args):
2029 def api_call_example(method, args):
2030 """
2030 """
2031 Generates an API call example via CURL
2031 Generates an API call example via CURL
2032 """
2032 """
2033 args_json = json.dumps(OrderedDict([
2033 args_json = json.dumps(OrderedDict([
2034 ('id', 1),
2034 ('id', 1),
2035 ('auth_token', 'SECRET'),
2035 ('auth_token', 'SECRET'),
2036 ('method', method),
2036 ('method', method),
2037 ('args', args)
2037 ('args', args)
2038 ]))
2038 ]))
2039 return literal(
2039 return literal(
2040 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2040 "curl {api_url} -X POST -H 'content-type:text/plain' --data-binary '{data}'"
2041 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2041 "<br/><br/>SECRET can be found in <a href=\"{token_url}\">auth-tokens</a> page, "
2042 "and needs to be of `api calls` role."
2042 "and needs to be of `api calls` role."
2043 .format(
2043 .format(
2044 api_url=route_url('apiv2'),
2044 api_url=route_url('apiv2'),
2045 token_url=route_url('my_account_auth_tokens'),
2045 token_url=route_url('my_account_auth_tokens'),
2046 data=args_json))
2046 data=args_json))
2047
2047
2048
2048
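The generated example boils down to a JSON-RPC-style payload POSTed to the API endpoint. A sketch of the payload construction; the endpoint URL, method name and arguments below are placeholders, and SECRET stands in for a real auth token.

    import json
    from collections import OrderedDict

    # placeholder method/args
    method = 'get_repo'
    args = {'repoid': 'my-repo'}

    payload = json.dumps(OrderedDict([
        ('id', 1),
        ('auth_token', 'SECRET'),
        ('method', method),
        ('args', args),
    ]))

    print("curl https://code.example.com/_admin/api -X POST "
          "-H 'content-type:text/plain' --data-binary '%s'" % payload)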
2049 def notification_description(notification, request):
2049 def notification_description(notification, request):
2050 """
2050 """
2051 Generate a human-readable notification description based on the notification type
2051 Generate a human-readable notification description based on the notification type
2052 """
2052 """
2053 from rhodecode.model.notification import NotificationModel
2053 from rhodecode.model.notification import NotificationModel
2054 return NotificationModel().make_description(
2054 return NotificationModel().make_description(
2055 notification, translate=request.translate)
2055 notification, translate=request.translate)
2056
2056
2057
2057
2058 def go_import_header(request, db_repo=None):
2058 def go_import_header(request, db_repo=None):
2059 """
2059 """
2060 Creates a meta header for Go's go-import (remote import path) functionality
2060 Creates a meta header for Go's go-import (remote import path) functionality
2061 """
2061 """
2062
2062
2063 if not db_repo:
2063 if not db_repo:
2064 return
2064 return
2065 if 'go-get' not in request.GET:
2065 if 'go-get' not in request.GET:
2066 return
2066 return
2067
2067
2068 clone_url = db_repo.clone_url()
2068 clone_url = db_repo.clone_url()
2069 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2069 prefix = re.split(r'^https?:\/\/', clone_url)[-1]
2070 # we have a repo and go-get flag,
2070 # we have a repo and go-get flag,
2071 return literal('<meta name="go-import" content="{} {} {}">'.format(
2071 return literal('<meta name="go-import" content="{} {} {}">'.format(
2072 prefix, db_repo.repo_type, clone_url))
2072 prefix, db_repo.repo_type, clone_url))
2073
2074
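The emitted tag follows Go's remote import path convention, `<meta name="go-import" content="import-prefix vcs repo-root">`. A sketch with placeholder values.

    import re

    # placeholder repository data
    clone_url = 'https://code.example.com/group/myrepo'
    repo_type = 'git'

    # strip the scheme to build the import prefix, as done above
    prefix = re.split(r'^https?:\/\/', clone_url)[-1]
    print('<meta name="go-import" content="{} {} {}">'.format(
        prefix, repo_type, clone_url))
    # <meta name="go-import" content="code.example.com/group/myrepo git https://code.example.com/group/myrepo">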
2075 def reviewer_as_json(*args, **kwargs):
2076 from rhodecode.apps.repository.utils import reviewer_as_json as _reviewer_as_json
2077 return _reviewer_as_json(*args, **kwargs)
@@ -1,267 +1,393 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import itertools
22 import itertools
23 import logging
23 import logging
24 from collections import defaultdict
24 import collections
25
25
26 from rhodecode.model import BaseModel
26 from rhodecode.model import BaseModel
27 from rhodecode.model.db import (
27 from rhodecode.model.db import (
28 ChangesetStatus, ChangesetComment, PullRequest, Session)
28 ChangesetStatus, ChangesetComment, PullRequest, Session)
29 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
29 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
30 from rhodecode.lib.markup_renderer import (
30 from rhodecode.lib.markup_renderer import (
31 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
31 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 class ChangesetStatusModel(BaseModel):
36 class ChangesetStatusModel(BaseModel):
37
37
38 cls = ChangesetStatus
38 cls = ChangesetStatus
39
39
40 def __get_changeset_status(self, changeset_status):
40 def __get_changeset_status(self, changeset_status):
41 return self._get_instance(ChangesetStatus, changeset_status)
41 return self._get_instance(ChangesetStatus, changeset_status)
42
42
43 def __get_pull_request(self, pull_request):
43 def __get_pull_request(self, pull_request):
44 return self._get_instance(PullRequest, pull_request)
44 return self._get_instance(PullRequest, pull_request)
45
45
46 def _get_status_query(self, repo, revision, pull_request,
46 def _get_status_query(self, repo, revision, pull_request,
47 with_revisions=False):
47 with_revisions=False):
48 repo = self._get_repo(repo)
48 repo = self._get_repo(repo)
49
49
50 q = ChangesetStatus.query()\
50 q = ChangesetStatus.query()\
51 .filter(ChangesetStatus.repo == repo)
51 .filter(ChangesetStatus.repo == repo)
52 if not with_revisions:
52 if not with_revisions:
53 q = q.filter(ChangesetStatus.version == 0)
53 q = q.filter(ChangesetStatus.version == 0)
54
54
55 if revision:
55 if revision:
56 q = q.filter(ChangesetStatus.revision == revision)
56 q = q.filter(ChangesetStatus.revision == revision)
57 elif pull_request:
57 elif pull_request:
58 pull_request = self.__get_pull_request(pull_request)
58 pull_request = self.__get_pull_request(pull_request)
59 # TODO: johbo: Think about the impact of this join, there must
59 # TODO: johbo: Think about the impact of this join, there must
60 # be a reason why ChangesetStatus and ChangesetComment are linked
60 # be a reason why ChangesetStatus and ChangesetComment are linked
61 # to the pull request. Might be that we want to do the same for
61 # to the pull request. Might be that we want to do the same for
62 # the pull_request_version_id.
62 # the pull_request_version_id.
63 q = q.join(ChangesetComment).filter(
63 q = q.join(ChangesetComment).filter(
64 ChangesetStatus.pull_request == pull_request,
64 ChangesetStatus.pull_request == pull_request,
65 ChangesetComment.pull_request_version_id == None)
65 ChangesetComment.pull_request_version_id == None)
66 else:
66 else:
67 raise Exception('Please specify revision or pull_request')
67 raise Exception('Please specify revision or pull_request')
68 q = q.order_by(ChangesetStatus.version.asc())
68 q = q.order_by(ChangesetStatus.version.asc())
69 return q
69 return q
70
70
71 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
72 trim_votes=True):
73 """
74 Calculate the group status based on the given group members and the voting rule
75
76
77 group1 - 4 members, 3 required for approval
78 user1 - approved
79 user2 - rejected
80 user3 - approved
81 user4 - rejected
82
83 final_state: rejected, reason: did not get at least 3 approvals
84
85
86 group1 - 4 members, 2 required for approval
87 user1 - approved
88 user2 - rejected
89 user3 - approved
90 user4 - rejected
91
92 final_state: approved, reason: got at least 2 approvals
93
94 group1 - 4 members, ALL required for approval
95 user1 - approved
96 user2 - rejected
97 user3 - approved
98 user4 - rejected
99
100 final_state: rejected, reason: not all members approved
101
102
103 group1 - 4 members, ALL required for approval
104 user1 - approved
105 user2 - approved
106 user3 - approved
107 user4 - approved
108
109 final_state: approved, reason: all approvals received
110
111 group1 - 4 members, 5 required for approval
112 (required approvals should be shortened to the number of actual members)
113
114 user1 - approved
115 user2 - approved
116 user3 - approved
117 user4 - approved
118
119 final_state: approved, reason: all approvals received
120
121 """
122 group_vote_data = {}
123 got_rule = False
124 members = collections.OrderedDict()
125 for review_obj, user, reasons, mandatory, statuses \
126 in group_statuses_by_reviewers:
127
128 if not got_rule:
129 group_vote_data = review_obj.rule_user_group_data()
130 got_rule = bool(group_vote_data)
131
132 members[user.user_id] = statuses
133
134 if not group_vote_data:
135 return []
136
137 required_votes = group_vote_data['vote_rule']
138 if required_votes == -1:
139 # -1 means all members are required, so we replace it with the
140 # number of members in the group
141 required_votes = len(members)
142
143 if trim_votes and required_votes > len(members):
144 # we require more votes than we have members in the group
145 # in this case we trim the required votes to the number of members
146 required_votes = len(members)
147
148 approvals = sum([
149 1 for statuses in members.values()
150 if statuses and
151 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
152
153 calculated_votes = []
154 # we have all votes from users, now check if we have enough votes
155 # to fill in the remaining (non-voting) members
156 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
157 if approvals >= required_votes:
158 fill_in = ChangesetStatus.STATUS_APPROVED
159
160 for member, statuses in members.items():
161 if statuses:
162 ver, latest = statuses[0]
163 if fill_in == ChangesetStatus.STATUS_APPROVED:
164 calculated_votes.append(fill_in)
165 else:
166 calculated_votes.append(latest.status)
167 else:
168 calculated_votes.append(fill_in)
169
170 return calculated_votes
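
To make the voting rule above concrete, here is a small self-contained sketch of the same tally for the "4 members, 2 required" scenario from the docstring; it uses plain strings instead of ChangesetStatus rows and is not the production code path:

    def tally_group(member_statuses, required_votes, trim_votes=True):
        # member_statuses: latest status string per member, or None if not voted yet
        if required_votes == -1:                        # -1 means every member must approve
            required_votes = len(member_statuses)
        if trim_votes and required_votes > len(member_statuses):
            required_votes = len(member_statuses)       # cannot require more votes than members
        approvals = sum(1 for st in member_statuses if st == 'approved')
        fill_in = 'approved' if approvals >= required_votes else 'under_review'
        # once the rule is satisfied every member counts as approved; otherwise
        # non-voters count as under_review and voters keep their own vote
        return [fill_in if (st is None or fill_in == 'approved') else st
                for st in member_statuses]

    # 4 members, 2 required: two approvals satisfy the rule, the group reads as approved
    print(tally_group(['approved', 'rejected', 'approved', 'rejected'], required_votes=2))
    # ['approved', 'approved', 'approved', 'approved']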
171
71 def calculate_status(self, statuses_by_reviewers):
172 def calculate_status(self, statuses_by_reviewers):
72 """
173 """
73 Given the approval statuses from reviewers, calculates final approval
174 Given the approval statuses from reviewers, calculates final approval
74 status. There can only be 3 results: approved when all reviewers approve,
175 status. There can only be 3 results: approved when all reviewers approve,
75 rejected when all reject, or under review when there is no consensus.
176 rejected when all reject, or under review when there is no consensus.
76
177
77 :param statuses_by_reviewers:
178 :param statuses_by_reviewers:
78 """
179 """
79 votes = defaultdict(int)
180
181 def group_rule(element):
182 review_obj = element[0]
183 rule_data = review_obj.rule_user_group_data()
184 if rule_data and rule_data['id']:
185 return rule_data['id']
186
187 voting_groups = itertools.groupby(
188 sorted(statuses_by_reviewers, key=group_rule), group_rule)
189
190 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
191
80 reviewers_number = len(statuses_by_reviewers)
192 reviewers_number = len(statuses_by_reviewers)
81 for user, reasons, mandatory, statuses in statuses_by_reviewers:
193 votes = collections.defaultdict(int)
82 if statuses:
194 for group, group_statuses_by_reviewers in voting_by_groups:
83 ver, latest = statuses[0]
195 if group:
84 votes[latest.status] += 1
196 # calculate how the "group" voted
197 for vote_status in self.calculate_group_vote(
198 group, group_statuses_by_reviewers):
199 votes[vote_status] += 1
85 else:
200 else:
86 votes[ChangesetStatus.DEFAULT] += 1
201
202 for review_obj, user, reasons, mandatory, statuses \
203 in group_statuses_by_reviewers:
204 # individual vote
205 if statuses:
206 ver, latest = statuses[0]
207 votes[latest.status] += 1
87
208
88 # all approved
209 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
89 if votes.get(ChangesetStatus.STATUS_APPROVED) == reviewers_number:
210 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
211
212 # TODO(marcink): with group voting, how does the rejected state work,
213 # do we ever get a rejected state?
214
215 if approved_votes_count == reviewers_number:
90 return ChangesetStatus.STATUS_APPROVED
216 return ChangesetStatus.STATUS_APPROVED
91
217
92 # all rejected
218 if rejected_votes_count == reviewers_number:
93 if votes.get(ChangesetStatus.STATUS_REJECTED) == reviewers_number:
94 return ChangesetStatus.STATUS_REJECTED
219 return ChangesetStatus.STATUS_REJECTED
95
220
96 return ChangesetStatus.STATUS_UNDER_REVIEW
221 return ChangesetStatus.STATUS_UNDER_REVIEW
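
Continuing the sketch above, the final pull request status follows from the aggregated vote counter: approved only when every counted vote is an approval, rejected only when every counted vote is a rejection, and under review otherwise. A compact illustration with plain strings (again, not the production code):

    import collections

    def final_status(votes_list):
        # votes_list: one status string per reviewer (or per group-adjusted vote)
        votes = collections.defaultdict(int)
        for status in votes_list:
            votes[status] += 1
        reviewers_number = len(votes_list)
        if votes['approved'] == reviewers_number:
            return 'approved'
        if votes['rejected'] == reviewers_number:
            return 'rejected'
        return 'under_review'

    print(final_status(['approved', 'approved', 'approved']))   # approved
    print(final_status(['approved', 'rejected', 'approved']))   # under_review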
97
222
98 def get_statuses(self, repo, revision=None, pull_request=None,
223 def get_statuses(self, repo, revision=None, pull_request=None,
99 with_revisions=False):
224 with_revisions=False):
100 q = self._get_status_query(repo, revision, pull_request,
225 q = self._get_status_query(repo, revision, pull_request,
101 with_revisions)
226 with_revisions)
102 return q.all()
227 return q.all()
103
228
104 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
229 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
105 """
230 """
106 Returns latest status of changeset for given revision or for given
231 Returns latest status of changeset for given revision or for given
107 pull request. Statuses are versioned inside the table itself, and
232 pull request. Statuses are versioned inside the table itself, and
108 version == 0 is always the current one.
233 version == 0 is always the current one.
109
234
110 :param repo:
235 :param repo:
111 :param revision: 40char hash or None
236 :param revision: 40char hash or None
112 :param pull_request: pull_request reference
237 :param pull_request: pull_request reference
113 :param as_str: return status as string not object
238 :param as_str: return status as string not object
114 """
239 """
115 q = self._get_status_query(repo, revision, pull_request)
240 q = self._get_status_query(repo, revision, pull_request)
116
241
117 # need to use first here since there can be multiple statuses
242 # need to use first here since there can be multiple statuses
118 # returned from pull_request
243 # returned from pull_request
119 status = q.first()
244 status = q.first()
120 if as_str:
245 if as_str:
121 status = status.status if status else status
246 status = status.status if status else status
122 st = status or ChangesetStatus.DEFAULT
247 st = status or ChangesetStatus.DEFAULT
123 return str(st)
248 return str(st)
124 return status
249 return status
125
250
126 def _render_auto_status_message(
251 def _render_auto_status_message(
127 self, status, commit_id=None, pull_request=None):
252 self, status, commit_id=None, pull_request=None):
128 """
253 """
129 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
254 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
130 so it's always looking the same disregarding on which default
130 so it always looks the same regardless of which default
255 so it always looks the same regardless of which default
131 renderer the system is using.
256 renderer the system is using.
257
133 :param status: status text to change into
258 :param status: status text to change into
134 :param commit_id: the commit_id we change the status for
259 :param commit_id: the commit_id we change the status for
135 :param pull_request: the pull request we change the status for
260 :param pull_request: the pull request we change the status for
136 """
261 """
137
262
138 new_status = ChangesetStatus.get_status_lbl(status)
263 new_status = ChangesetStatus.get_status_lbl(status)
139
264
140 params = {
265 params = {
141 'new_status_label': new_status,
266 'new_status_label': new_status,
142 'pull_request': pull_request,
267 'pull_request': pull_request,
143 'commit_id': commit_id,
268 'commit_id': commit_id,
144 }
269 }
145 renderer = RstTemplateRenderer()
270 renderer = RstTemplateRenderer()
146 return renderer.render('auto_status_change.mako', **params)
271 return renderer.render('auto_status_change.mako', **params)
147
272
148 def set_status(self, repo, status, user, comment=None, revision=None,
273 def set_status(self, repo, status, user, comment=None, revision=None,
149 pull_request=None, dont_allow_on_closed_pull_request=False):
274 pull_request=None, dont_allow_on_closed_pull_request=False):
150 """
275 """
151 Creates new status for changeset or updates the old ones bumping their
276 Creates new status for changeset or updates the old ones bumping their
152 version, leaving the current status at version 0
277 version, leaving the current status at version 0
153
278
154 :param repo:
279 :param repo:
155 :param revision:
280 :param revision:
156 :param status:
281 :param status:
157 :param user:
282 :param user:
158 :param comment:
283 :param comment:
159 :param dont_allow_on_closed_pull_request: don't allow a status change
284 :param dont_allow_on_closed_pull_request: don't allow a status change
160 if the last status was for a pull request and that pull request is
285 if the last status was for a pull request and that pull request is
161 closed. We shouldn't mess around with this manually
286 closed. We shouldn't mess around with this manually
162 """
287 """
163 repo = self._get_repo(repo)
288 repo = self._get_repo(repo)
164
289
165 q = ChangesetStatus.query()
290 q = ChangesetStatus.query()
166
291
167 if revision:
292 if revision:
168 q = q.filter(ChangesetStatus.repo == repo)
293 q = q.filter(ChangesetStatus.repo == repo)
169 q = q.filter(ChangesetStatus.revision == revision)
294 q = q.filter(ChangesetStatus.revision == revision)
170 elif pull_request:
295 elif pull_request:
171 pull_request = self.__get_pull_request(pull_request)
296 pull_request = self.__get_pull_request(pull_request)
172 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
297 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
173 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
298 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
174 cur_statuses = q.all()
299 cur_statuses = q.all()
175
300
176 # if statuses exist and the last one is associated with a closed pull request
301 # if statuses exist and the last one is associated with a closed pull request
177 # we need to check if we can allow this status change
302 # we need to check if we can allow this status change
178 if (dont_allow_on_closed_pull_request and cur_statuses
303 if (dont_allow_on_closed_pull_request and cur_statuses
179 and getattr(cur_statuses[0].pull_request, 'status', '')
304 and getattr(cur_statuses[0].pull_request, 'status', '')
180 == PullRequest.STATUS_CLOSED):
305 == PullRequest.STATUS_CLOSED):
181 raise StatusChangeOnClosedPullRequestError(
306 raise StatusChangeOnClosedPullRequestError(
182 'Changing status on closed pull request is not allowed'
307 'Changing status on closed pull request is not allowed'
183 )
308 )
184
309
185 # update all current statuses with older version
310 # update all current statuses with older version
186 if cur_statuses:
311 if cur_statuses:
187 for st in cur_statuses:
312 for st in cur_statuses:
188 st.version += 1
313 st.version += 1
189 Session().add(st)
314 Session().add(st)
190
315
191 def _create_status(user, repo, status, comment, revision, pull_request):
316 def _create_status(user, repo, status, comment, revision, pull_request):
192 new_status = ChangesetStatus()
317 new_status = ChangesetStatus()
193 new_status.author = self._get_user(user)
318 new_status.author = self._get_user(user)
194 new_status.repo = self._get_repo(repo)
319 new_status.repo = self._get_repo(repo)
195 new_status.status = status
320 new_status.status = status
196 new_status.comment = comment
321 new_status.comment = comment
197 new_status.revision = revision
322 new_status.revision = revision
198 new_status.pull_request = pull_request
323 new_status.pull_request = pull_request
199 return new_status
324 return new_status
200
325
201 if not comment:
326 if not comment:
202 from rhodecode.model.comment import CommentsModel
327 from rhodecode.model.comment import CommentsModel
203 comment = CommentsModel().create(
328 comment = CommentsModel().create(
204 text=self._render_auto_status_message(
329 text=self._render_auto_status_message(
205 status, commit_id=revision, pull_request=pull_request),
330 status, commit_id=revision, pull_request=pull_request),
206 repo=repo,
331 repo=repo,
207 user=user,
332 user=user,
208 pull_request=pull_request,
333 pull_request=pull_request,
209 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
334 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
210 )
335 )
211
336
212 if revision:
337 if revision:
213 new_status = _create_status(
338 new_status = _create_status(
214 user=user, repo=repo, status=status, comment=comment,
339 user=user, repo=repo, status=status, comment=comment,
215 revision=revision, pull_request=pull_request)
340 revision=revision, pull_request=pull_request)
216 Session().add(new_status)
341 Session().add(new_status)
217 return new_status
342 return new_status
218 elif pull_request:
343 elif pull_request:
219 # pull request can have more than one revision associated to it
344 # pull request can have more than one revision associated to it
220 # we need to create new version for each one
345 # we need to create new version for each one
221 new_statuses = []
346 new_statuses = []
222 repo = pull_request.source_repo
347 repo = pull_request.source_repo
223 for rev in pull_request.revisions:
348 for rev in pull_request.revisions:
224 new_status = _create_status(
349 new_status = _create_status(
225 user=user, repo=repo, status=status, comment=comment,
350 user=user, repo=repo, status=status, comment=comment,
226 revision=rev, pull_request=pull_request)
351 revision=rev, pull_request=pull_request)
227 new_statuses.append(new_status)
352 new_statuses.append(new_status)
228 Session().add(new_status)
353 Session().add(new_status)
229 return new_statuses
354 return new_statuses
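
The version-bump pattern used by set_status() and relied upon by get_status() can be illustrated with a minimal sketch that stands in for the ORM rows; every new status pushes the older ones to higher version numbers, so version == 0 is always the current one:

    history = []   # stand-in for the ChangesetStatus rows of one revision

    def set_status_sketch(new_status):
        for row in history:
            row['version'] += 1                                   # bump every existing status
        history.insert(0, {'status': new_status, 'version': 0})   # the new current status

    set_status_sketch('under_review')
    set_status_sketch('approved')
    current = [row for row in history if row['version'] == 0][0]
    # current == {'status': 'approved', 'version': 0}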
230
355
231 def reviewers_statuses(self, pull_request):
356 def reviewers_statuses(self, pull_request):
232 _commit_statuses = self.get_statuses(
357 _commit_statuses = self.get_statuses(
233 pull_request.source_repo,
358 pull_request.source_repo,
234 pull_request=pull_request,
359 pull_request=pull_request,
235 with_revisions=True)
360 with_revisions=True)
236
361
237 commit_statuses = defaultdict(list)
362 commit_statuses = collections.defaultdict(list)
238 for st in _commit_statuses:
363 for st in _commit_statuses:
239 commit_statuses[st.author.username] += [st]
364 commit_statuses[st.author.username] += [st]
240
365
241 pull_request_reviewers = []
366 pull_request_reviewers = []
242
367
243 def version(commit_status):
368 def version(commit_status):
244 return commit_status.version
369 return commit_status.version
245
370
246 for o in pull_request.reviewers:
371 for obj in pull_request.reviewers:
247 if not o.user:
372 if not obj.user:
248 continue
373 continue
249 statuses = commit_statuses.get(o.user.username, None)
374 statuses = commit_statuses.get(obj.user.username, None)
250 if statuses:
375 if statuses:
251 statuses = [(x, list(y)[0])
376 status_groups = itertools.groupby(
252 for x, y in (itertools.groupby(
377 sorted(statuses, key=version), version)
253 sorted(statuses, key=version),version))]
378 statuses = [(x, list(y)[0]) for x, y in status_groups]
254
379
255 pull_request_reviewers.append(
380 pull_request_reviewers.append(
256 (o.user, o.reasons, o.mandatory, statuses))
381 (obj, obj.user, obj.reasons, obj.mandatory, statuses))
382
257 return pull_request_reviewers
383 return pull_request_reviewers
258
384
259 def calculated_review_status(self, pull_request, reviewers_statuses=None):
385 def calculated_review_status(self, pull_request, reviewers_statuses=None):
260 """
386 """
261 calculate pull request status based on reviewers, it expects a list
386 calculate pull request status based on reviewers, it expects a list
262 of reviewer status tuples, as returned by reviewers_statuses().
387 of reviewer status tuples, as returned by reviewers_statuses().
263
389
264 :param reviewers_statuses:
390 :param reviewers_statuses:
265 """
391 """
266 reviewers = reviewers_statuses or self.reviewers_statuses(pull_request)
392 reviewers = reviewers_statuses or self.reviewers_statuses(pull_request)
267 return self.calculate_status(reviewers)
393 return self.calculate_status(reviewers)
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,614 +1,615 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 these are the forms validation classes; see
22 these are the forms validation classes; see
23 http://formencode.org/module-formencode.validators.html
23 http://formencode.org/module-formencode.validators.html
24 for a list of all available validators
24 for a list of all available validators
25
25
26 we can create our own validators
26 we can create our own validators
27
27
28 The table below outlines the options which can be used in a schema in addition to the validators themselves
28 The table below outlines the options which can be used in a schema in addition to the validators themselves
29 pre_validators [] These validators will be applied before the schema
29 pre_validators [] These validators will be applied before the schema
30 chained_validators [] These validators will be applied after the schema
30 chained_validators [] These validators will be applied after the schema
31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
31 allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present
32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
32 filter_extra_fields False If True, then keys that aren't associated with a validator are removed
33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
33 if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default value has been specified and therefore missing keys shouldn't take a default value.
34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
34 ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already
35
35
36
36
37 <name> = formencode.validators.<name of validator>
37 <name> = formencode.validators.<name of validator>
38 <name> must equal form name
38 <name> must equal form name
39 list=[1,2,3,4,5]
39 list=[1,2,3,4,5]
40 for SELECT use formencode.All(OneOf(list), Int())
40 for SELECT use formencode.All(OneOf(list), Int())
41
41
42 """
42 """
43
43
44 import deform
44 import deform
45 import logging
45 import logging
46 import formencode
46 import formencode
47
47
48 from pkg_resources import resource_filename
48 from pkg_resources import resource_filename
49 from formencode import All, Pipe
49 from formencode import All, Pipe
50
50
51 from pyramid.threadlocal import get_current_request
51 from pyramid.threadlocal import get_current_request
52
52
53 from rhodecode import BACKENDS
53 from rhodecode import BACKENDS
54 from rhodecode.lib import helpers
54 from rhodecode.lib import helpers
55 from rhodecode.model import validators as v
55 from rhodecode.model import validators as v
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
60 deform_templates = resource_filename('deform', 'templates')
60 deform_templates = resource_filename('deform', 'templates')
61 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
61 rhodecode_templates = resource_filename('rhodecode', 'templates/forms')
62 search_path = (rhodecode_templates, deform_templates)
62 search_path = (rhodecode_templates, deform_templates)
63
63
64
64
65 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
65 class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory):
66 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
66 """ Subclass of ZPTRendererFactory to add rhodecode context variables """
67 def __call__(self, template_name, **kw):
67 def __call__(self, template_name, **kw):
68 kw['h'] = helpers
68 kw['h'] = helpers
69 kw['request'] = get_current_request()
69 kw['request'] = get_current_request()
70 return self.load(template_name)(**kw)
70 return self.load(template_name)(**kw)
71
71
72
72
73 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
73 form_renderer = RhodecodeFormZPTRendererFactory(search_path)
74 deform.Form.set_default_renderer(form_renderer)
74 deform.Form.set_default_renderer(form_renderer)
75
75
76
76
77 def LoginForm(localizer):
77 def LoginForm(localizer):
78 _ = localizer
78 _ = localizer
79
79
80 class _LoginForm(formencode.Schema):
80 class _LoginForm(formencode.Schema):
81 allow_extra_fields = True
81 allow_extra_fields = True
82 filter_extra_fields = True
82 filter_extra_fields = True
83 username = v.UnicodeString(
83 username = v.UnicodeString(
84 strip=True,
84 strip=True,
85 min=1,
85 min=1,
86 not_empty=True,
86 not_empty=True,
87 messages={
87 messages={
88 'empty': _(u'Please enter a login'),
88 'empty': _(u'Please enter a login'),
89 'tooShort': _(u'Enter a value %(min)i characters long or more')
89 'tooShort': _(u'Enter a value %(min)i characters long or more')
90 }
90 }
91 )
91 )
92
92
93 password = v.UnicodeString(
93 password = v.UnicodeString(
94 strip=False,
94 strip=False,
95 min=3,
95 min=3,
96 max=72,
96 max=72,
97 not_empty=True,
97 not_empty=True,
98 messages={
98 messages={
99 'empty': _(u'Please enter a password'),
99 'empty': _(u'Please enter a password'),
100 'tooShort': _(u'Enter %(min)i characters or more')}
100 'tooShort': _(u'Enter %(min)i characters or more')}
101 )
101 )
102
102
103 remember = v.StringBoolean(if_missing=False)
103 remember = v.StringBoolean(if_missing=False)
104
104
105 chained_validators = [v.ValidAuth(localizer)]
105 chained_validators = [v.ValidAuth(localizer)]
106 return _LoginForm
106 return _LoginForm
107
107
108
108
109 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
109 def UserForm(localizer, edit=False, available_languages=None, old_data=None):
110 old_data = old_data or {}
110 old_data = old_data or {}
111 available_languages = available_languages or []
111 available_languages = available_languages or []
112 _ = localizer
112 _ = localizer
113
113
114 class _UserForm(formencode.Schema):
114 class _UserForm(formencode.Schema):
115 allow_extra_fields = True
115 allow_extra_fields = True
116 filter_extra_fields = True
116 filter_extra_fields = True
117 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
117 username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
118 v.ValidUsername(localizer, edit, old_data))
118 v.ValidUsername(localizer, edit, old_data))
119 if edit:
119 if edit:
120 new_password = All(
120 new_password = All(
121 v.ValidPassword(localizer),
121 v.ValidPassword(localizer),
122 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
122 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
123 )
123 )
124 password_confirmation = All(
124 password_confirmation = All(
125 v.ValidPassword(localizer),
125 v.ValidPassword(localizer),
126 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
126 v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
127 )
127 )
128 admin = v.StringBoolean(if_missing=False)
128 admin = v.StringBoolean(if_missing=False)
129 else:
129 else:
130 password = All(
130 password = All(
131 v.ValidPassword(localizer),
131 v.ValidPassword(localizer),
132 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
132 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
133 )
133 )
134 password_confirmation = All(
134 password_confirmation = All(
135 v.ValidPassword(localizer),
135 v.ValidPassword(localizer),
136 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
136 v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
137 )
137 )
138
138
139 password_change = v.StringBoolean(if_missing=False)
139 password_change = v.StringBoolean(if_missing=False)
140 create_repo_group = v.StringBoolean(if_missing=False)
140 create_repo_group = v.StringBoolean(if_missing=False)
141
141
142 active = v.StringBoolean(if_missing=False)
142 active = v.StringBoolean(if_missing=False)
143 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
143 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
144 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
144 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
145 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
145 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
146 extern_name = v.UnicodeString(strip=True)
146 extern_name = v.UnicodeString(strip=True)
147 extern_type = v.UnicodeString(strip=True)
147 extern_type = v.UnicodeString(strip=True)
148 language = v.OneOf(available_languages, hideList=False,
148 language = v.OneOf(available_languages, hideList=False,
149 testValueList=True, if_missing=None)
149 testValueList=True, if_missing=None)
150 chained_validators = [v.ValidPasswordsMatch(localizer)]
150 chained_validators = [v.ValidPasswordsMatch(localizer)]
151 return _UserForm
151 return _UserForm
152
152
153
153
154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
154 def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
155 old_data = old_data or {}
155 old_data = old_data or {}
156 _ = localizer
156 _ = localizer
157
157
158 class _UserGroupForm(formencode.Schema):
158 class _UserGroupForm(formencode.Schema):
159 allow_extra_fields = True
159 allow_extra_fields = True
160 filter_extra_fields = True
160 filter_extra_fields = True
161
161
162 users_group_name = All(
162 users_group_name = All(
163 v.UnicodeString(strip=True, min=1, not_empty=True),
163 v.UnicodeString(strip=True, min=1, not_empty=True),
164 v.ValidUserGroup(localizer, edit, old_data)
164 v.ValidUserGroup(localizer, edit, old_data)
165 )
165 )
166 user_group_description = v.UnicodeString(strip=True, min=1,
166 user_group_description = v.UnicodeString(strip=True, min=1,
167 not_empty=False)
167 not_empty=False)
168
168
169 users_group_active = v.StringBoolean(if_missing=False)
169 users_group_active = v.StringBoolean(if_missing=False)
170
170
171 if edit:
171 if edit:
172 # this is user group owner
172 # this is user group owner
173 user = All(
173 user = All(
174 v.UnicodeString(not_empty=True),
174 v.UnicodeString(not_empty=True),
175 v.ValidRepoUser(localizer, allow_disabled))
175 v.ValidRepoUser(localizer, allow_disabled))
176 return _UserGroupForm
176 return _UserGroupForm
177
177
178
178
179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
179 def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
180 can_create_in_root=False, allow_disabled=False):
180 can_create_in_root=False, allow_disabled=False):
181 _ = localizer
181 _ = localizer
182 old_data = old_data or {}
182 old_data = old_data or {}
183 available_groups = available_groups or []
183 available_groups = available_groups or []
184
184
185 class _RepoGroupForm(formencode.Schema):
185 class _RepoGroupForm(formencode.Schema):
186 allow_extra_fields = True
186 allow_extra_fields = True
187 filter_extra_fields = False
187 filter_extra_fields = False
188
188
189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
189 group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
190 v.SlugifyName(localizer),)
190 v.SlugifyName(localizer),)
191 group_description = v.UnicodeString(strip=True, min=1,
191 group_description = v.UnicodeString(strip=True, min=1,
192 not_empty=False)
192 not_empty=False)
193 group_copy_permissions = v.StringBoolean(if_missing=False)
193 group_copy_permissions = v.StringBoolean(if_missing=False)
194
194
195 group_parent_id = v.OneOf(available_groups, hideList=False,
195 group_parent_id = v.OneOf(available_groups, hideList=False,
196 testValueList=True, not_empty=True)
196 testValueList=True, not_empty=True)
197 enable_locking = v.StringBoolean(if_missing=False)
197 enable_locking = v.StringBoolean(if_missing=False)
198 chained_validators = [
198 chained_validators = [
199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
199 v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]
200
200
201 if edit:
201 if edit:
202 # this is repo group owner
202 # this is repo group owner
203 user = All(
203 user = All(
204 v.UnicodeString(not_empty=True),
204 v.UnicodeString(not_empty=True),
205 v.ValidRepoUser(localizer, allow_disabled))
205 v.ValidRepoUser(localizer, allow_disabled))
206 return _RepoGroupForm
206 return _RepoGroupForm
207
207
208
208
209 def RegisterForm(localizer, edit=False, old_data=None):
209 def RegisterForm(localizer, edit=False, old_data=None):
210 _ = localizer
210 _ = localizer
211 old_data = old_data or {}
211 old_data = old_data or {}
212
212
213 class _RegisterForm(formencode.Schema):
213 class _RegisterForm(formencode.Schema):
214 allow_extra_fields = True
214 allow_extra_fields = True
215 filter_extra_fields = True
215 filter_extra_fields = True
216 username = All(
216 username = All(
217 v.ValidUsername(localizer, edit, old_data),
217 v.ValidUsername(localizer, edit, old_data),
218 v.UnicodeString(strip=True, min=1, not_empty=True)
218 v.UnicodeString(strip=True, min=1, not_empty=True)
219 )
219 )
220 password = All(
220 password = All(
221 v.ValidPassword(localizer),
221 v.ValidPassword(localizer),
222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
222 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
223 )
223 )
224 password_confirmation = All(
224 password_confirmation = All(
225 v.ValidPassword(localizer),
225 v.ValidPassword(localizer),
226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
226 v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
227 )
227 )
228 active = v.StringBoolean(if_missing=False)
228 active = v.StringBoolean(if_missing=False)
229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
229 firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
230 lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
231 email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
232
232
233 chained_validators = [v.ValidPasswordsMatch(localizer)]
233 chained_validators = [v.ValidPasswordsMatch(localizer)]
234 return _RegisterForm
234 return _RegisterForm
235
235
236
236
237 def PasswordResetForm(localizer):
237 def PasswordResetForm(localizer):
238 _ = localizer
238 _ = localizer
239
239
240 class _PasswordResetForm(formencode.Schema):
240 class _PasswordResetForm(formencode.Schema):
241 allow_extra_fields = True
241 allow_extra_fields = True
242 filter_extra_fields = True
242 filter_extra_fields = True
243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
243 email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
244 return _PasswordResetForm
244 return _PasswordResetForm
245
245
246
246
247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None,
247 def RepoForm(localizer, edit=False, old_data=None, repo_groups=None,
248 landing_revs=None, allow_disabled=False):
248 landing_revs=None, allow_disabled=False):
249 _ = localizer
249 _ = localizer
250 old_data = old_data or {}
250 old_data = old_data or {}
251 repo_groups = repo_groups or []
251 repo_groups = repo_groups or []
252 landing_revs = landing_revs or []
252 landing_revs = landing_revs or []
253 supported_backends = BACKENDS.keys()
253 supported_backends = BACKENDS.keys()
254
254
255 class _RepoForm(formencode.Schema):
255 class _RepoForm(formencode.Schema):
256 allow_extra_fields = True
256 allow_extra_fields = True
257 filter_extra_fields = False
257 filter_extra_fields = False
258 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
258 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
259 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
259 v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
260 repo_group = All(v.CanWriteGroup(localizer, old_data),
260 repo_group = All(v.CanWriteGroup(localizer, old_data),
261 v.OneOf(repo_groups, hideList=True))
261 v.OneOf(repo_groups, hideList=True))
262 repo_type = v.OneOf(supported_backends, required=False,
262 repo_type = v.OneOf(supported_backends, required=False,
263 if_missing=old_data.get('repo_type'))
263 if_missing=old_data.get('repo_type'))
264 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
264 repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
265 repo_private = v.StringBoolean(if_missing=False)
265 repo_private = v.StringBoolean(if_missing=False)
266 repo_landing_rev = v.OneOf(landing_revs, hideList=True)
266 repo_landing_rev = v.OneOf(landing_revs, hideList=True)
267 repo_copy_permissions = v.StringBoolean(if_missing=False)
267 repo_copy_permissions = v.StringBoolean(if_missing=False)
268 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
268 clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))
269
269
270 repo_enable_statistics = v.StringBoolean(if_missing=False)
270 repo_enable_statistics = v.StringBoolean(if_missing=False)
271 repo_enable_downloads = v.StringBoolean(if_missing=False)
271 repo_enable_downloads = v.StringBoolean(if_missing=False)
272 repo_enable_locking = v.StringBoolean(if_missing=False)
272 repo_enable_locking = v.StringBoolean(if_missing=False)
273
273
274 if edit:
274 if edit:
275 # this is repo owner
275 # this is repo owner
276 user = All(
276 user = All(
277 v.UnicodeString(not_empty=True),
277 v.UnicodeString(not_empty=True),
278 v.ValidRepoUser(localizer, allow_disabled))
278 v.ValidRepoUser(localizer, allow_disabled))
279 clone_uri_change = v.UnicodeString(
279 clone_uri_change = v.UnicodeString(
280 not_empty=False, if_missing=v.Missing)
280 not_empty=False, if_missing=v.Missing)
281
281
282 chained_validators = [v.ValidCloneUri(localizer),
282 chained_validators = [v.ValidCloneUri(localizer),
283 v.ValidRepoName(localizer, edit, old_data)]
283 v.ValidRepoName(localizer, edit, old_data)]
284 return _RepoForm
284 return _RepoForm
285
285
286
286
287 def RepoPermsForm(localizer):
287 def RepoPermsForm(localizer):
288 _ = localizer
288 _ = localizer
289
289
290 class _RepoPermsForm(formencode.Schema):
290 class _RepoPermsForm(formencode.Schema):
291 allow_extra_fields = True
291 allow_extra_fields = True
292 filter_extra_fields = False
292 filter_extra_fields = False
293 chained_validators = [v.ValidPerms(localizer, type_='repo')]
293 chained_validators = [v.ValidPerms(localizer, type_='repo')]
294 return _RepoPermsForm
294 return _RepoPermsForm
295
295
296
296
297 def RepoGroupPermsForm(localizer, valid_recursive_choices):
297 def RepoGroupPermsForm(localizer, valid_recursive_choices):
298 _ = localizer
298 _ = localizer
299
299
300 class _RepoGroupPermsForm(formencode.Schema):
300 class _RepoGroupPermsForm(formencode.Schema):
301 allow_extra_fields = True
301 allow_extra_fields = True
302 filter_extra_fields = False
302 filter_extra_fields = False
303 recursive = v.OneOf(valid_recursive_choices)
303 recursive = v.OneOf(valid_recursive_choices)
304 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
304 chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
305 return _RepoGroupPermsForm
305 return _RepoGroupPermsForm
306
306
307
307
308 def UserGroupPermsForm(localizer):
308 def UserGroupPermsForm(localizer):
309 _ = localizer
309 _ = localizer
310
310
311 class _UserPermsForm(formencode.Schema):
311 class _UserPermsForm(formencode.Schema):
312 allow_extra_fields = True
312 allow_extra_fields = True
313 filter_extra_fields = False
313 filter_extra_fields = False
314 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
314 chained_validators = [v.ValidPerms(localizer, type_='user_group')]
315 return _UserPermsForm
315 return _UserPermsForm
316
316
317
317
318 def RepoFieldForm(localizer):
318 def RepoFieldForm(localizer):
319 _ = localizer
319 _ = localizer
320
320
321 class _RepoFieldForm(formencode.Schema):
321 class _RepoFieldForm(formencode.Schema):
322 filter_extra_fields = True
322 filter_extra_fields = True
323 allow_extra_fields = True
323 allow_extra_fields = True
324
324
325 new_field_key = All(v.FieldKey(localizer),
325 new_field_key = All(v.FieldKey(localizer),
326 v.UnicodeString(strip=True, min=3, not_empty=True))
326 v.UnicodeString(strip=True, min=3, not_empty=True))
327 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
327 new_field_value = v.UnicodeString(not_empty=False, if_missing=u'')
328 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
328 new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
329 if_missing='str')
329 if_missing='str')
330 new_field_label = v.UnicodeString(not_empty=False)
330 new_field_label = v.UnicodeString(not_empty=False)
331 new_field_desc = v.UnicodeString(not_empty=False)
331 new_field_desc = v.UnicodeString(not_empty=False)
332 return _RepoFieldForm
332 return _RepoFieldForm
333
333
334
334
335 def RepoForkForm(localizer, edit=False, old_data=None,
335 def RepoForkForm(localizer, edit=False, old_data=None,
336 supported_backends=BACKENDS.keys(), repo_groups=None,
336 supported_backends=BACKENDS.keys(), repo_groups=None,
337 landing_revs=None):
337 landing_revs=None):
338 _ = localizer
338 _ = localizer
339 old_data = old_data or {}
339 old_data = old_data or {}
340 repo_groups = repo_groups or []
340 repo_groups = repo_groups or []
341 landing_revs = landing_revs or []
341 landing_revs = landing_revs or []
342
342
343 class _RepoForkForm(formencode.Schema):
343 class _RepoForkForm(formencode.Schema):
344 allow_extra_fields = True
344 allow_extra_fields = True
345 filter_extra_fields = False
345 filter_extra_fields = False
346 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
346 repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
347 v.SlugifyName(localizer))
347 v.SlugifyName(localizer))
348 repo_group = All(v.CanWriteGroup(localizer, ),
348 repo_group = All(v.CanWriteGroup(localizer, ),
349 v.OneOf(repo_groups, hideList=True))
349 v.OneOf(repo_groups, hideList=True))
350 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
350 repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
351 description = v.UnicodeString(strip=True, min=1, not_empty=True)
351 description = v.UnicodeString(strip=True, min=1, not_empty=True)
352 private = v.StringBoolean(if_missing=False)
352 private = v.StringBoolean(if_missing=False)
353 copy_permissions = v.StringBoolean(if_missing=False)
353 copy_permissions = v.StringBoolean(if_missing=False)
354 fork_parent_id = v.UnicodeString()
354 fork_parent_id = v.UnicodeString()
355 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
355 chained_validators = [v.ValidForkName(localizer, edit, old_data)]
356 landing_rev = v.OneOf(landing_revs, hideList=True)
356 landing_rev = v.OneOf(landing_revs, hideList=True)
357 return _RepoForkForm
357 return _RepoForkForm
358
358
359
359
360 def ApplicationSettingsForm(localizer):
360 def ApplicationSettingsForm(localizer):
361 _ = localizer
361 _ = localizer
362
362
363 class _ApplicationSettingsForm(formencode.Schema):
363 class _ApplicationSettingsForm(formencode.Schema):
364 allow_extra_fields = True
364 allow_extra_fields = True
365 filter_extra_fields = False
365 filter_extra_fields = False
366 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
366 rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
367 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
367 rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
368 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
368 rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
369 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
369 rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
370 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
370 rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
371 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
371 rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
372 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
372 rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
373 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
373 rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
374 return _ApplicationSettingsForm
374 return _ApplicationSettingsForm
375
375
376
376
377 def ApplicationVisualisationForm(localizer):
377 def ApplicationVisualisationForm(localizer):
378 _ = localizer
378 _ = localizer
379
379
380 class _ApplicationVisualisationForm(formencode.Schema):
380 class _ApplicationVisualisationForm(formencode.Schema):
381 allow_extra_fields = True
381 allow_extra_fields = True
382 filter_extra_fields = False
382 filter_extra_fields = False
383 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
383 rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
384 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
384 rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
385 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
385 rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)
386
386
387 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
387 rhodecode_repository_fields = v.StringBoolean(if_missing=False)
388 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
388 rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
389 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
389 rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
390 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
390 rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
391 rhodecode_show_version = v.StringBoolean(if_missing=False)
391 rhodecode_show_version = v.StringBoolean(if_missing=False)
392 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
392 rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
393 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
393 rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
394 rhodecode_gravatar_url = v.UnicodeString(min=3)
394 rhodecode_gravatar_url = v.UnicodeString(min=3)
395 rhodecode_clone_uri_tmpl = v.UnicodeString(min=3)
395 rhodecode_clone_uri_tmpl = v.UnicodeString(min=3)
396 rhodecode_support_url = v.UnicodeString()
396 rhodecode_support_url = v.UnicodeString()
397 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
397 rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
398 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
398 rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
399 return _ApplicationVisualisationForm
399 return _ApplicationVisualisationForm
400
400
401
401
402 class _BaseVcsSettingsForm(formencode.Schema):
402 class _BaseVcsSettingsForm(formencode.Schema):
403
403
404 allow_extra_fields = True
404 allow_extra_fields = True
405 filter_extra_fields = False
405 filter_extra_fields = False
406 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
406 hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
407 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
407 hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
408 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
408 hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)
409
409
410 # PR/Code-review
410 # PR/Code-review
411 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
411 rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
412 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
412 rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)
413
413
414 # hg
414 # hg
415 extensions_largefiles = v.StringBoolean(if_missing=False)
415 extensions_largefiles = v.StringBoolean(if_missing=False)
416 extensions_evolve = v.StringBoolean(if_missing=False)
416 extensions_evolve = v.StringBoolean(if_missing=False)
417 phases_publish = v.StringBoolean(if_missing=False)
417 phases_publish = v.StringBoolean(if_missing=False)
418
418
419 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
419 rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
420 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
420 rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)
421
421
422 # git
422 # git
423 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
423 vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
424 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
424 rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
425 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
425 rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)
426
426
427 # svn
427 # svn
428 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
428 vcs_svn_proxy_http_requests_enabled = v.StringBoolean(if_missing=False)
429 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
429 vcs_svn_proxy_http_server_url = v.UnicodeString(strip=True, if_missing=None)
430
430
431
431
432 def ApplicationUiSettingsForm(localizer):
432 def ApplicationUiSettingsForm(localizer):
433 _ = localizer
433 _ = localizer
434
434
435 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
435 class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
436 web_push_ssl = v.StringBoolean(if_missing=False)
436 web_push_ssl = v.StringBoolean(if_missing=False)
437 paths_root_path = All(
437 paths_root_path = All(
438 v.ValidPath(localizer),
438 v.ValidPath(localizer),
439 v.UnicodeString(strip=True, min=1, not_empty=True)
439 v.UnicodeString(strip=True, min=1, not_empty=True)
440 )
440 )
441 largefiles_usercache = All(
441 largefiles_usercache = All(
442 v.ValidPath(localizer),
442 v.ValidPath(localizer),
443 v.UnicodeString(strip=True, min=2, not_empty=True))
443 v.UnicodeString(strip=True, min=2, not_empty=True))
444 vcs_git_lfs_store_location = All(
444 vcs_git_lfs_store_location = All(
445 v.ValidPath(localizer),
445 v.ValidPath(localizer),
446 v.UnicodeString(strip=True, min=2, not_empty=True))
446 v.UnicodeString(strip=True, min=2, not_empty=True))
447 extensions_hgsubversion = v.StringBoolean(if_missing=False)
447 extensions_hgsubversion = v.StringBoolean(if_missing=False)
448 extensions_hggit = v.StringBoolean(if_missing=False)
448 extensions_hggit = v.StringBoolean(if_missing=False)
449 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
449 new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
450 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
450 new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
451 return _ApplicationUiSettingsForm
451 return _ApplicationUiSettingsForm
452
452
453
453
454 def RepoVcsSettingsForm(localizer, repo_name):
454 def RepoVcsSettingsForm(localizer, repo_name):
455 _ = localizer
455 _ = localizer
456
456
457 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
457 class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
458 inherit_global_settings = v.StringBoolean(if_missing=False)
458 inherit_global_settings = v.StringBoolean(if_missing=False)
459 new_svn_branch = v.ValidSvnPattern(localizer,
459 new_svn_branch = v.ValidSvnPattern(localizer,
460 section='vcs_svn_branch', repo_name=repo_name)
460 section='vcs_svn_branch', repo_name=repo_name)
461 new_svn_tag = v.ValidSvnPattern(localizer,
461 new_svn_tag = v.ValidSvnPattern(localizer,
462 section='vcs_svn_tag', repo_name=repo_name)
462 section='vcs_svn_tag', repo_name=repo_name)
463 return _RepoVcsSettingsForm
463 return _RepoVcsSettingsForm
464
464
465
465
466 def LabsSettingsForm(localizer):
466 def LabsSettingsForm(localizer):
467 _ = localizer
467 _ = localizer
468
468
469 class _LabSettingsForm(formencode.Schema):
469 class _LabSettingsForm(formencode.Schema):
470 allow_extra_fields = True
470 allow_extra_fields = True
471 filter_extra_fields = False
471 filter_extra_fields = False
472 return _LabSettingsForm
472 return _LabSettingsForm
473
473
474
474
475 def ApplicationPermissionsForm(
475 def ApplicationPermissionsForm(
476 localizer, register_choices, password_reset_choices,
476 localizer, register_choices, password_reset_choices,
477 extern_activate_choices):
477 extern_activate_choices):
478 _ = localizer
478 _ = localizer
479
479
480 class _DefaultPermissionsForm(formencode.Schema):
480 class _DefaultPermissionsForm(formencode.Schema):
481 allow_extra_fields = True
481 allow_extra_fields = True
482 filter_extra_fields = True
482 filter_extra_fields = True
483
483
484 anonymous = v.StringBoolean(if_missing=False)
484 anonymous = v.StringBoolean(if_missing=False)
485 default_register = v.OneOf(register_choices)
485 default_register = v.OneOf(register_choices)
486 default_register_message = v.UnicodeString()
486 default_register_message = v.UnicodeString()
487 default_password_reset = v.OneOf(password_reset_choices)
487 default_password_reset = v.OneOf(password_reset_choices)
488 default_extern_activate = v.OneOf(extern_activate_choices)
488 default_extern_activate = v.OneOf(extern_activate_choices)
489 return _DefaultPermissionsForm
489 return _DefaultPermissionsForm
490
490
491
491
492 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
492 def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
493 user_group_perms_choices):
493 user_group_perms_choices):
494 _ = localizer
494 _ = localizer
495
495
496 class _ObjectPermissionsForm(formencode.Schema):
496 class _ObjectPermissionsForm(formencode.Schema):
497 allow_extra_fields = True
497 allow_extra_fields = True
498 filter_extra_fields = True
498 filter_extra_fields = True
499 overwrite_default_repo = v.StringBoolean(if_missing=False)
499 overwrite_default_repo = v.StringBoolean(if_missing=False)
500 overwrite_default_group = v.StringBoolean(if_missing=False)
500 overwrite_default_group = v.StringBoolean(if_missing=False)
501 overwrite_default_user_group = v.StringBoolean(if_missing=False)
501 overwrite_default_user_group = v.StringBoolean(if_missing=False)
502 default_repo_perm = v.OneOf(repo_perms_choices)
502 default_repo_perm = v.OneOf(repo_perms_choices)
503 default_group_perm = v.OneOf(group_perms_choices)
503 default_group_perm = v.OneOf(group_perms_choices)
504 default_user_group_perm = v.OneOf(user_group_perms_choices)
504 default_user_group_perm = v.OneOf(user_group_perms_choices)
505 return _ObjectPermissionsForm
505 return _ObjectPermissionsForm
506
506
507
507
508 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
508 def UserPermissionsForm(localizer, create_choices, create_on_write_choices,
509 repo_group_create_choices, user_group_create_choices,
509 repo_group_create_choices, user_group_create_choices,
510 fork_choices, inherit_default_permissions_choices):
510 fork_choices, inherit_default_permissions_choices):
511 _ = localizer
511 _ = localizer
512
512
513 class _DefaultPermissionsForm(formencode.Schema):
513 class _DefaultPermissionsForm(formencode.Schema):
514 allow_extra_fields = True
514 allow_extra_fields = True
515 filter_extra_fields = True
515 filter_extra_fields = True
516
516
517 anonymous = v.StringBoolean(if_missing=False)
517 anonymous = v.StringBoolean(if_missing=False)
518
518
519 default_repo_create = v.OneOf(create_choices)
519 default_repo_create = v.OneOf(create_choices)
520 default_repo_create_on_write = v.OneOf(create_on_write_choices)
520 default_repo_create_on_write = v.OneOf(create_on_write_choices)
521 default_user_group_create = v.OneOf(user_group_create_choices)
521 default_user_group_create = v.OneOf(user_group_create_choices)
522 default_repo_group_create = v.OneOf(repo_group_create_choices)
522 default_repo_group_create = v.OneOf(repo_group_create_choices)
523 default_fork_create = v.OneOf(fork_choices)
523 default_fork_create = v.OneOf(fork_choices)
524 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
524 default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices)
525 return _DefaultPermissionsForm
525 return _DefaultPermissionsForm
526
526
527
527
528 def UserIndividualPermissionsForm(localizer):
528 def UserIndividualPermissionsForm(localizer):
529 _ = localizer
529 _ = localizer
530
530
531 class _DefaultPermissionsForm(formencode.Schema):
531 class _DefaultPermissionsForm(formencode.Schema):
532 allow_extra_fields = True
532 allow_extra_fields = True
533 filter_extra_fields = True
533 filter_extra_fields = True
534
534
535 inherit_default_permissions = v.StringBoolean(if_missing=False)
535 inherit_default_permissions = v.StringBoolean(if_missing=False)
536 return _DefaultPermissionsForm
536 return _DefaultPermissionsForm
537
537
538
538
539 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
539 def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()):
540 _ = localizer
540 _ = localizer
541 old_data = old_data or {}
541 old_data = old_data or {}
542
542
543 class _DefaultsForm(formencode.Schema):
543 class _DefaultsForm(formencode.Schema):
544 allow_extra_fields = True
544 allow_extra_fields = True
545 filter_extra_fields = True
545 filter_extra_fields = True
546 default_repo_type = v.OneOf(supported_backends)
546 default_repo_type = v.OneOf(supported_backends)
547 default_repo_private = v.StringBoolean(if_missing=False)
547 default_repo_private = v.StringBoolean(if_missing=False)
548 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
548 default_repo_enable_statistics = v.StringBoolean(if_missing=False)
549 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
549 default_repo_enable_downloads = v.StringBoolean(if_missing=False)
550 default_repo_enable_locking = v.StringBoolean(if_missing=False)
550 default_repo_enable_locking = v.StringBoolean(if_missing=False)
551 return _DefaultsForm
551 return _DefaultsForm
552
552
553
553
554 def AuthSettingsForm(localizer):
554 def AuthSettingsForm(localizer):
555 _ = localizer
555 _ = localizer
556
556
557 class _AuthSettingsForm(formencode.Schema):
557 class _AuthSettingsForm(formencode.Schema):
558 allow_extra_fields = True
558 allow_extra_fields = True
559 filter_extra_fields = True
559 filter_extra_fields = True
560 auth_plugins = All(v.ValidAuthPlugins(localizer),
560 auth_plugins = All(v.ValidAuthPlugins(localizer),
561 v.UniqueListFromString(localizer)(not_empty=True))
561 v.UniqueListFromString(localizer)(not_empty=True))
562 return _AuthSettingsForm
562 return _AuthSettingsForm
563
563
564
564
565 def UserExtraEmailForm(localizer):
565 def UserExtraEmailForm(localizer):
566 _ = localizer
566 _ = localizer
567
567
568 class _UserExtraEmailForm(formencode.Schema):
568 class _UserExtraEmailForm(formencode.Schema):
569 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
569 email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True))
570 return _UserExtraEmailForm
570 return _UserExtraEmailForm
571
571
572
572
573 def UserExtraIpForm(localizer):
573 def UserExtraIpForm(localizer):
574 _ = localizer
574 _ = localizer
575
575
576 class _UserExtraIpForm(formencode.Schema):
576 class _UserExtraIpForm(formencode.Schema):
577 ip = v.ValidIp(localizer)(not_empty=True)
577 ip = v.ValidIp(localizer)(not_empty=True)
578 return _UserExtraIpForm
578 return _UserExtraIpForm
579
579
580
580
581 def PullRequestForm(localizer, repo_id):
581 def PullRequestForm(localizer, repo_id):
582 _ = localizer
582 _ = localizer
583
583
584 class ReviewerForm(formencode.Schema):
584 class ReviewerForm(formencode.Schema):
585 user_id = v.Int(not_empty=True)
585 user_id = v.Int(not_empty=True)
586 reasons = All()
586 reasons = All()
587 rules = All(v.UniqueList(localizer, convert=int)())
587 mandatory = v.StringBoolean()
588 mandatory = v.StringBoolean()
588
589
589 class _PullRequestForm(formencode.Schema):
590 class _PullRequestForm(formencode.Schema):
590 allow_extra_fields = True
591 allow_extra_fields = True
591 filter_extra_fields = True
592 filter_extra_fields = True
592
593
593 common_ancestor = v.UnicodeString(strip=True, required=True)
594 common_ancestor = v.UnicodeString(strip=True, required=True)
594 source_repo = v.UnicodeString(strip=True, required=True)
595 source_repo = v.UnicodeString(strip=True, required=True)
595 source_ref = v.UnicodeString(strip=True, required=True)
596 source_ref = v.UnicodeString(strip=True, required=True)
596 target_repo = v.UnicodeString(strip=True, required=True)
597 target_repo = v.UnicodeString(strip=True, required=True)
597 target_ref = v.UnicodeString(strip=True, required=True)
598 target_ref = v.UnicodeString(strip=True, required=True)
598 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
599 revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(),
599 v.UniqueList(localizer)(not_empty=True))
600 v.UniqueList(localizer)(not_empty=True))
600 review_members = formencode.ForEach(ReviewerForm())
601 review_members = formencode.ForEach(ReviewerForm())
601 pullrequest_title = v.UnicodeString(strip=True, required=True, min=3, max=255)
602 pullrequest_title = v.UnicodeString(strip=True, required=True, min=3, max=255)
602 pullrequest_desc = v.UnicodeString(strip=True, required=False)
603 pullrequest_desc = v.UnicodeString(strip=True, required=False)
603
604
604 return _PullRequestForm
605 return _PullRequestForm
605
606
606
607
607 def IssueTrackerPatternsForm(localizer):
608 def IssueTrackerPatternsForm(localizer):
608 _ = localizer
609 _ = localizer
609
610
610 class _IssueTrackerPatternsForm(formencode.Schema):
611 class _IssueTrackerPatternsForm(formencode.Schema):
611 allow_extra_fields = True
612 allow_extra_fields = True
612 filter_extra_fields = False
613 filter_extra_fields = False
613 chained_validators = [v.ValidPattern(localizer)]
614 chained_validators = [v.ValidPattern(localizer)]
614 return _IssueTrackerPatternsForm
615 return _IssueTrackerPatternsForm
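Note: the rules field added to ReviewerForm above carries the ids of the review rules that matched each reviewer, and each entry is validated through formencode.ForEach(ReviewerForm()) in _PullRequestForm. Below is a minimal, self-contained sketch of how such an entry is coerced; it deliberately uses plain formencode validators rather than RhodeCode's custom v.* ones, and the class name and payload values are invented for illustration only.

import formencode
from formencode import validators

class ReviewerEntry(formencode.Schema):
    # mirrors the fields of ReviewerForm: user_id, reasons, rules, mandatory
    allow_extra_fields = True
    filter_extra_fields = True
    user_id = validators.Int(not_empty=True)
    reasons = formencode.ForEach(validators.UnicodeString())
    rules = formencode.ForEach(validators.Int())   # ids of matching review rules
    mandatory = validators.StringBool(if_missing=False)

entry = ReviewerEntry().to_python({
    'user_id': '42',
    'reasons': ['member of a default reviewer rule'],
    'rules': ['7'],
    'mandatory': 'true',
})
# -> {'user_id': 42, 'reasons': [...], 'rules': [7], 'mandatory': True}
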
@@ -1,1654 +1,1681 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2017 RhodeCode GmbH
3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid.threadlocal import get_current_request
33 from pyramid.threadlocal import get_current_request
34
34
35 from rhodecode import events
35 from rhodecode import events
36 from rhodecode.translation import lazy_ugettext#, _
36 from rhodecode.translation import lazy_ugettext#, _
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import audit_logger
38 from rhodecode.lib import audit_logger
39 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.markup_renderer import (
41 from rhodecode.lib.markup_renderer import (
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.vcs.backends.base import (
44 from rhodecode.lib.vcs.backends.base import (
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.exceptions import (
47 from rhodecode.lib.vcs.exceptions import (
48 CommitDoesNotExistError, EmptyRepositoryError)
48 CommitDoesNotExistError, EmptyRepositoryError)
49 from rhodecode.model import BaseModel
49 from rhodecode.model import BaseModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.db import (
52 from rhodecode.model.db import (
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 PullRequestVersion, ChangesetComment, Repository)
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56 from rhodecode.model.notification import NotificationModel, \
56 from rhodecode.model.notification import NotificationModel, \
57 EmailNotificationModel
57 EmailNotificationModel
58 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64
64
65 # Data structure to hold the response data when updating commits during a pull
65 # Data structure to hold the response data when updating commits during a pull
66 # request update.
66 # request update.
67 UpdateResponse = collections.namedtuple('UpdateResponse', [
67 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 'executed', 'reason', 'new', 'old', 'changes',
68 'executed', 'reason', 'new', 'old', 'changes',
69 'source_changed', 'target_changed'])
69 'source_changed', 'target_changed'])
70
70
71
71
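As the comment above notes, UpdateResponse is a plain namedtuple. A tiny standalone sketch of constructing one for an early-exit case follows; the reason value is illustrative only, since the real code uses UpdateFailureReason constants.

import collections

UpdateResponse = collections.namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed'])

# an early-exit response, similar to what update_commits() returns
# when the source ref type is not a branch, tag or bookmark
resp = UpdateResponse(
    executed=False, reason='wrong_ref_type', new=None, old=None,
    changes=None, source_changed=False, target_changed=False)
assert resp.executed is False
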
72 class PullRequestModel(BaseModel):
72 class PullRequestModel(BaseModel):
73
73
74 cls = PullRequest
74 cls = PullRequest
75
75
76 DIFF_CONTEXT = 3
76 DIFF_CONTEXT = 3
77
77
78 MERGE_STATUS_MESSAGES = {
78 MERGE_STATUS_MESSAGES = {
79 MergeFailureReason.NONE: lazy_ugettext(
79 MergeFailureReason.NONE: lazy_ugettext(
80 'This pull request can be automatically merged.'),
80 'This pull request can be automatically merged.'),
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
81 MergeFailureReason.UNKNOWN: lazy_ugettext(
82 'This pull request cannot be merged because of an unhandled'
82 'This pull request cannot be merged because of an unhandled'
83 ' exception.'),
83 ' exception.'),
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
84 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
85 'This pull request cannot be merged because of merge conflicts.'),
85 'This pull request cannot be merged because of merge conflicts.'),
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
86 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
87 'This pull request could not be merged because push to target'
87 'This pull request could not be merged because push to target'
88 ' failed.'),
88 ' failed.'),
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
89 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
90 'This pull request cannot be merged because the target is not a'
90 'This pull request cannot be merged because the target is not a'
91 ' head.'),
91 ' head.'),
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
92 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
93 'This pull request cannot be merged because the source contains'
93 'This pull request cannot be merged because the source contains'
94 ' more branches than the target.'),
94 ' more branches than the target.'),
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
95 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
96 'This pull request cannot be merged because the target has'
96 'This pull request cannot be merged because the target has'
97 ' multiple heads.'),
97 ' multiple heads.'),
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
98 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
99 'This pull request cannot be merged because the target repository'
99 'This pull request cannot be merged because the target repository'
100 ' is locked.'),
100 ' is locked.'),
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
101 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
102 'This pull request cannot be merged because the target or the '
102 'This pull request cannot be merged because the target or the '
103 'source reference is missing.'),
103 'source reference is missing.'),
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
104 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
105 'This pull request cannot be merged because the target '
105 'This pull request cannot be merged because the target '
106 'reference is missing.'),
106 'reference is missing.'),
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
107 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
108 'This pull request cannot be merged because the source '
108 'This pull request cannot be merged because the source '
109 'reference is missing.'),
109 'reference is missing.'),
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
110 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
111 'This pull request cannot be merged because of conflicts related '
111 'This pull request cannot be merged because of conflicts related '
112 'to sub repositories.'),
112 'to sub repositories.'),
113 }
113 }
114
114
115 UPDATE_STATUS_MESSAGES = {
115 UPDATE_STATUS_MESSAGES = {
116 UpdateFailureReason.NONE: lazy_ugettext(
116 UpdateFailureReason.NONE: lazy_ugettext(
117 'Pull request update successful.'),
117 'Pull request update successful.'),
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
118 UpdateFailureReason.UNKNOWN: lazy_ugettext(
119 'Pull request update failed because of an unknown error.'),
119 'Pull request update failed because of an unknown error.'),
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
120 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
121 'No update needed because the source and target have not changed.'),
121 'No update needed because the source and target have not changed.'),
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
122 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
123 'Pull request cannot be updated because the reference type is '
123 'Pull request cannot be updated because the reference type is '
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
124 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
125 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
126 'This pull request cannot be updated because the target '
126 'This pull request cannot be updated because the target '
127 'reference is missing.'),
127 'reference is missing.'),
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
128 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
129 'This pull request cannot be updated because the source '
129 'This pull request cannot be updated because the source '
130 'reference is missing.'),
130 'reference is missing.'),
131 }
131 }
132
132
133 def __get_pull_request(self, pull_request):
133 def __get_pull_request(self, pull_request):
134 return self._get_instance((
134 return self._get_instance((
135 PullRequest, PullRequestVersion), pull_request)
135 PullRequest, PullRequestVersion), pull_request)
136
136
137 def _check_perms(self, perms, pull_request, user, api=False):
137 def _check_perms(self, perms, pull_request, user, api=False):
138 if not api:
138 if not api:
139 return h.HasRepoPermissionAny(*perms)(
139 return h.HasRepoPermissionAny(*perms)(
140 user=user, repo_name=pull_request.target_repo.repo_name)
140 user=user, repo_name=pull_request.target_repo.repo_name)
141 else:
141 else:
142 return h.HasRepoPermissionAnyApi(*perms)(
142 return h.HasRepoPermissionAnyApi(*perms)(
143 user=user, repo_name=pull_request.target_repo.repo_name)
143 user=user, repo_name=pull_request.target_repo.repo_name)
144
144
145 def check_user_read(self, pull_request, user, api=False):
145 def check_user_read(self, pull_request, user, api=False):
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
146 _perms = ('repository.admin', 'repository.write', 'repository.read',)
147 return self._check_perms(_perms, pull_request, user, api)
147 return self._check_perms(_perms, pull_request, user, api)
148
148
149 def check_user_merge(self, pull_request, user, api=False):
149 def check_user_merge(self, pull_request, user, api=False):
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
150 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
151 return self._check_perms(_perms, pull_request, user, api)
151 return self._check_perms(_perms, pull_request, user, api)
152
152
153 def check_user_update(self, pull_request, user, api=False):
153 def check_user_update(self, pull_request, user, api=False):
154 owner = user.user_id == pull_request.user_id
154 owner = user.user_id == pull_request.user_id
155 return self.check_user_merge(pull_request, user, api) or owner
155 return self.check_user_merge(pull_request, user, api) or owner
156
156
157 def check_user_delete(self, pull_request, user):
157 def check_user_delete(self, pull_request, user):
158 owner = user.user_id == pull_request.user_id
158 owner = user.user_id == pull_request.user_id
159 _perms = ('repository.admin',)
159 _perms = ('repository.admin',)
160 return self._check_perms(_perms, pull_request, user) or owner
160 return self._check_perms(_perms, pull_request, user) or owner
161
161
162 def check_user_change_status(self, pull_request, user, api=False):
162 def check_user_change_status(self, pull_request, user, api=False):
163 reviewer = user.user_id in [x.user_id for x in
163 reviewer = user.user_id in [x.user_id for x in
164 pull_request.reviewers]
164 pull_request.reviewers]
165 return self.check_user_update(pull_request, user, api) or reviewer
165 return self.check_user_update(pull_request, user, api) or reviewer
166
166
167 def check_user_comment(self, pull_request, user):
167 def check_user_comment(self, pull_request, user):
168 owner = user.user_id == pull_request.user_id
168 owner = user.user_id == pull_request.user_id
169 return self.check_user_read(pull_request, user) or owner
169 return self.check_user_read(pull_request, user) or owner
170
170
171 def get(self, pull_request):
171 def get(self, pull_request):
172 return self.__get_pull_request(pull_request)
172 return self.__get_pull_request(pull_request)
173
173
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 opened_by=None, order_by=None,
175 opened_by=None, order_by=None,
176 order_dir='desc'):
176 order_dir='desc'):
177 repo = None
177 repo = None
178 if repo_name:
178 if repo_name:
179 repo = self._get_repo(repo_name)
179 repo = self._get_repo(repo_name)
180
180
181 q = PullRequest.query()
181 q = PullRequest.query()
182
182
183 # source or target
183 # source or target
184 if repo and source:
184 if repo and source:
185 q = q.filter(PullRequest.source_repo == repo)
185 q = q.filter(PullRequest.source_repo == repo)
186 elif repo:
186 elif repo:
187 q = q.filter(PullRequest.target_repo == repo)
187 q = q.filter(PullRequest.target_repo == repo)
188
188
189 # closed,opened
189 # closed,opened
190 if statuses:
190 if statuses:
191 q = q.filter(PullRequest.status.in_(statuses))
191 q = q.filter(PullRequest.status.in_(statuses))
192
192
193 # opened by filter
193 # opened by filter
194 if opened_by:
194 if opened_by:
195 q = q.filter(PullRequest.user_id.in_(opened_by))
195 q = q.filter(PullRequest.user_id.in_(opened_by))
196
196
197 if order_by:
197 if order_by:
198 order_map = {
198 order_map = {
199 'name_raw': PullRequest.pull_request_id,
199 'name_raw': PullRequest.pull_request_id,
200 'title': PullRequest.title,
200 'title': PullRequest.title,
201 'updated_on_raw': PullRequest.updated_on,
201 'updated_on_raw': PullRequest.updated_on,
202 'target_repo': PullRequest.target_repo_id
202 'target_repo': PullRequest.target_repo_id
203 }
203 }
204 if order_dir == 'asc':
204 if order_dir == 'asc':
205 q = q.order_by(order_map[order_by].asc())
205 q = q.order_by(order_map[order_by].asc())
206 else:
206 else:
207 q = q.order_by(order_map[order_by].desc())
207 q = q.order_by(order_map[order_by].desc())
208
208
209 return q
209 return q
210
210
211 def count_all(self, repo_name, source=False, statuses=None,
211 def count_all(self, repo_name, source=False, statuses=None,
212 opened_by=None):
212 opened_by=None):
213 """
213 """
214 Count the number of pull requests for a specific repository.
214 Count the number of pull requests for a specific repository.
215
215
216 :param repo_name: target or source repo
216 :param repo_name: target or source repo
217 :param source: boolean flag to specify if repo_name refers to source
217 :param source: boolean flag to specify if repo_name refers to source
218 :param statuses: list of pull request statuses
218 :param statuses: list of pull request statuses
219 :param opened_by: author user of the pull request
219 :param opened_by: author user of the pull request
220 :returns: int number of pull requests
220 :returns: int number of pull requests
221 """
221 """
222 q = self._prepare_get_all_query(
222 q = self._prepare_get_all_query(
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
223 repo_name, source=source, statuses=statuses, opened_by=opened_by)
224
224
225 return q.count()
225 return q.count()
226
226
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 offset=0, length=None, order_by=None, order_dir='desc'):
228 offset=0, length=None, order_by=None, order_dir='desc'):
229 """
229 """
230 Get all pull requests for a specific repository.
230 Get all pull requests for a specific repository.
231
231
232 :param repo_name: target or source repo
232 :param repo_name: target or source repo
233 :param source: boolean flag to specify if repo_name refers to source
233 :param source: boolean flag to specify if repo_name refers to source
234 :param statuses: list of pull request statuses
234 :param statuses: list of pull request statuses
235 :param opened_by: author user of the pull request
235 :param opened_by: author user of the pull request
236 :param offset: pagination offset
236 :param offset: pagination offset
237 :param length: length of returned list
237 :param length: length of returned list
238 :param order_by: order of the returned list
238 :param order_by: order of the returned list
239 :param order_dir: 'asc' or 'desc' ordering direction
239 :param order_dir: 'asc' or 'desc' ordering direction
240 :returns: list of pull requests
240 :returns: list of pull requests
241 """
241 """
242 q = self._prepare_get_all_query(
242 q = self._prepare_get_all_query(
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 order_by=order_by, order_dir=order_dir)
244 order_by=order_by, order_dir=order_dir)
245
245
246 if length:
246 if length:
247 pull_requests = q.limit(length).offset(offset).all()
247 pull_requests = q.limit(length).offset(offset).all()
248 else:
248 else:
249 pull_requests = q.all()
249 pull_requests = q.all()
250
250
251 return pull_requests
251 return pull_requests
252
252
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 opened_by=None):
254 opened_by=None):
255 """
255 """
256 Count the number of pull requests for a specific repository that are
256 Count the number of pull requests for a specific repository that are
257 awaiting review.
257 awaiting review.
258
258
259 :param repo_name: target or source repo
259 :param repo_name: target or source repo
260 :param source: boolean flag to specify if repo_name refers to source
260 :param source: boolean flag to specify if repo_name refers to source
261 :param statuses: list of pull request statuses
261 :param statuses: list of pull request statuses
262 :param opened_by: author user of the pull request
262 :param opened_by: author user of the pull request
263 :returns: int number of pull requests
263 :returns: int number of pull requests
264 """
264 """
265 pull_requests = self.get_awaiting_review(
265 pull_requests = self.get_awaiting_review(
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267
267
268 return len(pull_requests)
268 return len(pull_requests)
269
269
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 opened_by=None, offset=0, length=None,
271 opened_by=None, offset=0, length=None,
272 order_by=None, order_dir='desc'):
272 order_by=None, order_dir='desc'):
273 """
273 """
274 Get all pull requests for a specific repository that are awaiting
274 Get all pull requests for a specific repository that are awaiting
275 review.
275 review.
276
276
277 :param repo_name: target or source repo
277 :param repo_name: target or source repo
278 :param source: boolean flag to specify if repo_name refers to source
278 :param source: boolean flag to specify if repo_name refers to source
279 :param statuses: list of pull request statuses
279 :param statuses: list of pull request statuses
280 :param opened_by: author user of the pull request
280 :param opened_by: author user of the pull request
281 :param offset: pagination offset
281 :param offset: pagination offset
282 :param length: length of returned list
282 :param length: length of returned list
283 :param order_by: order of the returned list
283 :param order_by: order of the returned list
284 :param order_dir: 'asc' or 'desc' ordering direction
284 :param order_dir: 'asc' or 'desc' ordering direction
285 :returns: list of pull requests
285 :returns: list of pull requests
286 """
286 """
287 pull_requests = self.get_all(
287 pull_requests = self.get_all(
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 order_by=order_by, order_dir=order_dir)
289 order_by=order_by, order_dir=order_dir)
290
290
291 _filtered_pull_requests = []
291 _filtered_pull_requests = []
292 for pr in pull_requests:
292 for pr in pull_requests:
293 status = pr.calculated_review_status()
293 status = pr.calculated_review_status()
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 _filtered_pull_requests.append(pr)
296 _filtered_pull_requests.append(pr)
297 if length:
297 if length:
298 return _filtered_pull_requests[offset:offset+length]
298 return _filtered_pull_requests[offset:offset+length]
299 else:
299 else:
300 return _filtered_pull_requests
300 return _filtered_pull_requests
301
301
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 opened_by=None, user_id=None):
303 opened_by=None, user_id=None):
304 """
304 """
305 Count the number of pull requests for a specific repository that are
305 Count the number of pull requests for a specific repository that are
306 awaiting review from a specific user.
306 awaiting review from a specific user.
307
307
308 :param repo_name: target or source repo
308 :param repo_name: target or source repo
309 :param source: boolean flag to specify if repo_name refers to source
309 :param source: boolean flag to specify if repo_name refers to source
310 :param statuses: list of pull request statuses
310 :param statuses: list of pull request statuses
311 :param opened_by: author user of the pull request
311 :param opened_by: author user of the pull request
312 :param user_id: reviewer user of the pull request
312 :param user_id: reviewer user of the pull request
313 :returns: int number of pull requests
313 :returns: int number of pull requests
314 """
314 """
315 pull_requests = self.get_awaiting_my_review(
315 pull_requests = self.get_awaiting_my_review(
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 user_id=user_id)
317 user_id=user_id)
318
318
319 return len(pull_requests)
319 return len(pull_requests)
320
320
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 opened_by=None, user_id=None, offset=0,
322 opened_by=None, user_id=None, offset=0,
323 length=None, order_by=None, order_dir='desc'):
323 length=None, order_by=None, order_dir='desc'):
324 """
324 """
325 Get all pull requests for a specific repository that are awaiting
325 Get all pull requests for a specific repository that are awaiting
326 review from a specific user.
326 review from a specific user.
327
327
328 :param repo_name: target or source repo
328 :param repo_name: target or source repo
329 :param source: boolean flag to specify if repo_name refers to source
329 :param source: boolean flag to specify if repo_name refers to source
330 :param statuses: list of pull request statuses
330 :param statuses: list of pull request statuses
331 :param opened_by: author user of the pull request
331 :param opened_by: author user of the pull request
332 :param user_id: reviewer user of the pull request
332 :param user_id: reviewer user of the pull request
333 :param offset: pagination offset
333 :param offset: pagination offset
334 :param length: length of returned list
334 :param length: length of returned list
335 :param order_by: order of the returned list
335 :param order_by: order of the returned list
336 :param order_dir: 'asc' or 'desc' ordering direction
336 :param order_dir: 'asc' or 'desc' ordering direction
337 :returns: list of pull requests
337 :returns: list of pull requests
338 """
338 """
339 pull_requests = self.get_all(
339 pull_requests = self.get_all(
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 order_by=order_by, order_dir=order_dir)
341 order_by=order_by, order_dir=order_dir)
342
342
343 _my = PullRequestModel().get_not_reviewed(user_id)
343 _my = PullRequestModel().get_not_reviewed(user_id)
344 my_participation = []
344 my_participation = []
345 for pr in pull_requests:
345 for pr in pull_requests:
346 if pr in _my:
346 if pr in _my:
347 my_participation.append(pr)
347 my_participation.append(pr)
348 _filtered_pull_requests = my_participation
348 _filtered_pull_requests = my_participation
349 if length:
349 if length:
350 return _filtered_pull_requests[offset:offset+length]
350 return _filtered_pull_requests[offset:offset+length]
351 else:
351 else:
352 return _filtered_pull_requests
352 return _filtered_pull_requests
353
353
354 def get_not_reviewed(self, user_id):
354 def get_not_reviewed(self, user_id):
355 return [
355 return [
356 x.pull_request for x in PullRequestReviewers.query().filter(
356 x.pull_request for x in PullRequestReviewers.query().filter(
357 PullRequestReviewers.user_id == user_id).all()
357 PullRequestReviewers.user_id == user_id).all()
358 ]
358 ]
359
359
360 def _prepare_participating_query(self, user_id=None, statuses=None,
360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 order_by=None, order_dir='desc'):
361 order_by=None, order_dir='desc'):
362 q = PullRequest.query()
362 q = PullRequest.query()
363 if user_id:
363 if user_id:
364 reviewers_subquery = Session().query(
364 reviewers_subquery = Session().query(
365 PullRequestReviewers.pull_request_id).filter(
365 PullRequestReviewers.pull_request_id).filter(
366 PullRequestReviewers.user_id == user_id).subquery()
366 PullRequestReviewers.user_id == user_id).subquery()
367 user_filter = or_(
367 user_filter = or_(
368 PullRequest.user_id == user_id,
368 PullRequest.user_id == user_id,
369 PullRequest.pull_request_id.in_(reviewers_subquery)
369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 )
370 )
371 q = PullRequest.query().filter(user_filter)
371 q = PullRequest.query().filter(user_filter)
372
372
373 # closed,opened
373 # closed,opened
374 if statuses:
374 if statuses:
375 q = q.filter(PullRequest.status.in_(statuses))
375 q = q.filter(PullRequest.status.in_(statuses))
376
376
377 if order_by:
377 if order_by:
378 order_map = {
378 order_map = {
379 'name_raw': PullRequest.pull_request_id,
379 'name_raw': PullRequest.pull_request_id,
380 'title': PullRequest.title,
380 'title': PullRequest.title,
381 'updated_on_raw': PullRequest.updated_on,
381 'updated_on_raw': PullRequest.updated_on,
382 'target_repo': PullRequest.target_repo_id
382 'target_repo': PullRequest.target_repo_id
383 }
383 }
384 if order_dir == 'asc':
384 if order_dir == 'asc':
385 q = q.order_by(order_map[order_by].asc())
385 q = q.order_by(order_map[order_by].asc())
386 else:
386 else:
387 q = q.order_by(order_map[order_by].desc())
387 q = q.order_by(order_map[order_by].desc())
388
388
389 return q
389 return q
390
390
391 def count_im_participating_in(self, user_id=None, statuses=None):
391 def count_im_participating_in(self, user_id=None, statuses=None):
392 q = self._prepare_participating_query(user_id, statuses=statuses)
392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 return q.count()
393 return q.count()
394
394
395 def get_im_participating_in(
395 def get_im_participating_in(
396 self, user_id=None, statuses=None, offset=0,
396 self, user_id=None, statuses=None, offset=0,
397 length=None, order_by=None, order_dir='desc'):
397 length=None, order_by=None, order_dir='desc'):
398 """
398 """
399 Get all pull requests that I'm participating in or have opened
399 Get all pull requests that I'm participating in or have opened
400 """
400 """
401
401
402 q = self._prepare_participating_query(
402 q = self._prepare_participating_query(
403 user_id, statuses=statuses, order_by=order_by,
403 user_id, statuses=statuses, order_by=order_by,
404 order_dir=order_dir)
404 order_dir=order_dir)
405
405
406 if length:
406 if length:
407 pull_requests = q.limit(length).offset(offset).all()
407 pull_requests = q.limit(length).offset(offset).all()
408 else:
408 else:
409 pull_requests = q.all()
409 pull_requests = q.all()
410
410
411 return pull_requests
411 return pull_requests
412
412
413 def get_versions(self, pull_request):
413 def get_versions(self, pull_request):
414 """
414 """
415 returns versions of the pull request sorted by ID ascending
415 returns versions of the pull request sorted by ID ascending
416 """
416 """
417 return PullRequestVersion.query()\
417 return PullRequestVersion.query()\
418 .filter(PullRequestVersion.pull_request == pull_request)\
418 .filter(PullRequestVersion.pull_request == pull_request)\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
419 .order_by(PullRequestVersion.pull_request_version_id.asc())\
420 .all()
420 .all()
421
421
422 def get_pr_version(self, pull_request_id, version=None):
422 def get_pr_version(self, pull_request_id, version=None):
423 at_version = None
423 at_version = None
424
424
425 if version and version == 'latest':
425 if version and version == 'latest':
426 pull_request_ver = PullRequest.get(pull_request_id)
426 pull_request_ver = PullRequest.get(pull_request_id)
427 pull_request_obj = pull_request_ver
427 pull_request_obj = pull_request_ver
428 _org_pull_request_obj = pull_request_obj
428 _org_pull_request_obj = pull_request_obj
429 at_version = 'latest'
429 at_version = 'latest'
430 elif version:
430 elif version:
431 pull_request_ver = PullRequestVersion.get_or_404(version)
431 pull_request_ver = PullRequestVersion.get_or_404(version)
432 pull_request_obj = pull_request_ver
432 pull_request_obj = pull_request_ver
433 _org_pull_request_obj = pull_request_ver.pull_request
433 _org_pull_request_obj = pull_request_ver.pull_request
434 at_version = pull_request_ver.pull_request_version_id
434 at_version = pull_request_ver.pull_request_version_id
435 else:
435 else:
436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
436 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
437 pull_request_id)
437 pull_request_id)
438
438
439 pull_request_display_obj = PullRequest.get_pr_display_object(
439 pull_request_display_obj = PullRequest.get_pr_display_object(
440 pull_request_obj, _org_pull_request_obj)
440 pull_request_obj, _org_pull_request_obj)
441
441
442 return _org_pull_request_obj, pull_request_obj, \
442 return _org_pull_request_obj, pull_request_obj, \
443 pull_request_display_obj, at_version
443 pull_request_display_obj, at_version
444
444
445 def create(self, created_by, source_repo, source_ref, target_repo,
445 def create(self, created_by, source_repo, source_ref, target_repo,
446 target_ref, revisions, reviewers, title, description=None,
446 target_ref, revisions, reviewers, title, description=None,
447 reviewer_data=None, translator=None):
447 reviewer_data=None, translator=None):
448 translator = translator or get_current_request().translate
448 translator = translator or get_current_request().translate
449
449
450 created_by_user = self._get_user(created_by)
450 created_by_user = self._get_user(created_by)
451 source_repo = self._get_repo(source_repo)
451 source_repo = self._get_repo(source_repo)
452 target_repo = self._get_repo(target_repo)
452 target_repo = self._get_repo(target_repo)
453
453
454 pull_request = PullRequest()
454 pull_request = PullRequest()
455 pull_request.source_repo = source_repo
455 pull_request.source_repo = source_repo
456 pull_request.source_ref = source_ref
456 pull_request.source_ref = source_ref
457 pull_request.target_repo = target_repo
457 pull_request.target_repo = target_repo
458 pull_request.target_ref = target_ref
458 pull_request.target_ref = target_ref
459 pull_request.revisions = revisions
459 pull_request.revisions = revisions
460 pull_request.title = title
460 pull_request.title = title
461 pull_request.description = description
461 pull_request.description = description
462 pull_request.author = created_by_user
462 pull_request.author = created_by_user
463 pull_request.reviewer_data = reviewer_data
463 pull_request.reviewer_data = reviewer_data
464
464
465 Session().add(pull_request)
465 Session().add(pull_request)
466 Session().flush()
466 Session().flush()
467
467
468 reviewer_ids = set()
468 reviewer_ids = set()
469 # members / reviewers
469 # members / reviewers
470 for reviewer_object in reviewers:
470 for reviewer_object in reviewers:
471 user_id, reasons, mandatory = reviewer_object
471 user_id, reasons, mandatory, rules = reviewer_object
472 user = self._get_user(user_id)
472 user = self._get_user(user_id)
473
473
474 # skip duplicates
474 # skip duplicates
475 if user.user_id in reviewer_ids:
475 if user.user_id in reviewer_ids:
476 continue
476 continue
477
477
478 reviewer_ids.add(user.user_id)
478 reviewer_ids.add(user.user_id)
479
479
480 reviewer = PullRequestReviewers()
480 reviewer = PullRequestReviewers()
481 reviewer.user = user
481 reviewer.user = user
482 reviewer.pull_request = pull_request
482 reviewer.pull_request = pull_request
483 reviewer.reasons = reasons
483 reviewer.reasons = reasons
484 reviewer.mandatory = mandatory
484 reviewer.mandatory = mandatory
485
486 # NOTE(marcink): pick only the first rule for now
487 rule_id = rules[0] if rules else None
488 rule = RepoReviewRule.get(rule_id) if rule_id else None
489 if rule:
490 review_group = rule.user_group_vote_rule()
491 if review_group:
492 # NOTE(marcink):
493 # again, the user can be a member of more than one group,
494 # but we pick the first one, same as the default reviewers algorithm
495 review_group = review_group[0]
496
497 rule_data = {
498 'rule_name':
499 rule.review_rule_name,
500 'rule_user_group_entry_id':
501 review_group.repo_review_rule_users_group_id,
502 'rule_user_group_name':
503 review_group.users_group.users_group_name,
504 'rule_user_group_members':
505 [x.user.username for x in review_group.users_group.members],
506 }
507 # e.g. {'vote_rule': -1, 'mandatory': True}
508 rule_data.update(review_group.rule_data())
509
510 reviewer.rule_data = rule_data
511
485 Session().add(reviewer)
512 Session().add(reviewer)
486
513
487 # Set approval status to "Under Review" for all commits which are
514 # Set approval status to "Under Review" for all commits which are
488 # part of this pull request.
515 # part of this pull request.
489 ChangesetStatusModel().set_status(
516 ChangesetStatusModel().set_status(
490 repo=target_repo,
517 repo=target_repo,
491 status=ChangesetStatus.STATUS_UNDER_REVIEW,
518 status=ChangesetStatus.STATUS_UNDER_REVIEW,
492 user=created_by_user,
519 user=created_by_user,
493 pull_request=pull_request
520 pull_request=pull_request
494 )
521 )
495
522
496 MergeCheck.validate(
523 MergeCheck.validate(
497 pull_request, user=created_by_user, translator=translator)
524 pull_request, user=created_by_user, translator=translator)
498
525
499 self.notify_reviewers(pull_request, reviewer_ids)
526 self.notify_reviewers(pull_request, reviewer_ids)
500 self._trigger_pull_request_hook(
527 self._trigger_pull_request_hook(
501 pull_request, created_by_user, 'create')
528 pull_request, created_by_user, 'create')
502
529
503 creation_data = pull_request.get_api_data(with_merge_state=False)
530 creation_data = pull_request.get_api_data(with_merge_state=False)
504 self._log_audit_action(
531 self._log_audit_action(
505 'repo.pull_request.create', {'data': creation_data},
532 'repo.pull_request.create', {'data': creation_data},
506 created_by_user, pull_request)
533 created_by_user, pull_request)
507
534
508 return pull_request
535 return pull_request
509
536
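For reference, the rule_data dict built in create() above is what ends up stored on the reviewer row via reviewer.rule_data. A small, hypothetical sketch of its serialized shape follows; all concrete values (rule name, group name, usernames, ids) are invented, and vote_rule/mandatory stand in for whatever review_group.rule_data() returns (e.g. {'vote_rule': -1, 'mandatory': True}, per the comment in the code above).

import json

rule_data = {
    'rule_name': 'default-reviewers',             # rule.review_rule_name
    'rule_user_group_entry_id': 3,                # repo_review_rule_users_group_id
    'rule_user_group_name': 'qa-team',            # users_group.users_group_name
    'rule_user_group_members': ['alice', 'bob'],  # usernames of the group members
    # merged in from review_group.rule_data()
    'vote_rule': -1,
    'mandatory': True,
}
print(json.dumps(rule_data, indent=2))
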
510 def _trigger_pull_request_hook(self, pull_request, user, action):
537 def _trigger_pull_request_hook(self, pull_request, user, action):
511 pull_request = self.__get_pull_request(pull_request)
538 pull_request = self.__get_pull_request(pull_request)
512 target_scm = pull_request.target_repo.scm_instance()
539 target_scm = pull_request.target_repo.scm_instance()
513 if action == 'create':
540 if action == 'create':
514 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
541 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
515 elif action == 'merge':
542 elif action == 'merge':
516 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
543 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
517 elif action == 'close':
544 elif action == 'close':
518 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
545 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
519 elif action == 'review_status_change':
546 elif action == 'review_status_change':
520 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
547 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
521 elif action == 'update':
548 elif action == 'update':
522 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
549 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
523 else:
550 else:
524 return
551 return
525
552
526 trigger_hook(
553 trigger_hook(
527 username=user.username,
554 username=user.username,
528 repo_name=pull_request.target_repo.repo_name,
555 repo_name=pull_request.target_repo.repo_name,
529 repo_alias=target_scm.alias,
556 repo_alias=target_scm.alias,
530 pull_request=pull_request)
557 pull_request=pull_request)
531
558
532 def _get_commit_ids(self, pull_request):
559 def _get_commit_ids(self, pull_request):
533 """
560 """
534 Return the commit ids of the merged pull request.
561 Return the commit ids of the merged pull request.
535
562
536 This method does not yet deal correctly with the lack of autoupdates
563 This method does not yet deal correctly with the lack of autoupdates
537 or with implicit target updates.
564 or with implicit target updates.
538 For example: if a commit in the source repo is already in the target, it
565 For example: if a commit in the source repo is already in the target, it
539 will still be reported.
566 will still be reported.
540 """
567 """
541 merge_rev = pull_request.merge_rev
568 merge_rev = pull_request.merge_rev
542 if merge_rev is None:
569 if merge_rev is None:
543 raise ValueError('This pull request was not merged yet')
570 raise ValueError('This pull request was not merged yet')
544
571
545 commit_ids = list(pull_request.revisions)
572 commit_ids = list(pull_request.revisions)
546 if merge_rev not in commit_ids:
573 if merge_rev not in commit_ids:
547 commit_ids.append(merge_rev)
574 commit_ids.append(merge_rev)
548
575
549 return commit_ids
576 return commit_ids
550
577
551 def merge(self, pull_request, user, extras):
578 def merge(self, pull_request, user, extras):
552 log.debug("Merging pull request %s", pull_request.pull_request_id)
579 log.debug("Merging pull request %s", pull_request.pull_request_id)
553 merge_state = self._merge_pull_request(pull_request, user, extras)
580 merge_state = self._merge_pull_request(pull_request, user, extras)
554 if merge_state.executed:
581 if merge_state.executed:
555 log.debug(
582 log.debug(
556 "Merge was successful, updating the pull request comments.")
583 "Merge was successful, updating the pull request comments.")
557 self._comment_and_close_pr(pull_request, user, merge_state)
584 self._comment_and_close_pr(pull_request, user, merge_state)
558
585
559 self._log_audit_action(
586 self._log_audit_action(
560 'repo.pull_request.merge',
587 'repo.pull_request.merge',
561 {'merge_state': merge_state.__dict__},
588 {'merge_state': merge_state.__dict__},
562 user, pull_request)
589 user, pull_request)
563
590
564 else:
591 else:
565 log.warn("Merge failed, not updating the pull request.")
592 log.warn("Merge failed, not updating the pull request.")
566 return merge_state
593 return merge_state
567
594
568 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
595 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
569 target_vcs = pull_request.target_repo.scm_instance()
596 target_vcs = pull_request.target_repo.scm_instance()
570 source_vcs = pull_request.source_repo.scm_instance()
597 source_vcs = pull_request.source_repo.scm_instance()
571 target_ref = self._refresh_reference(
598 target_ref = self._refresh_reference(
572 pull_request.target_ref_parts, target_vcs)
599 pull_request.target_ref_parts, target_vcs)
573
600
574 message = merge_msg or (
601 message = merge_msg or (
575 'Merge pull request #%(pr_id)s from '
602 'Merge pull request #%(pr_id)s from '
576 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
603 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
577 'pr_id': pull_request.pull_request_id,
604 'pr_id': pull_request.pull_request_id,
578 'source_repo': source_vcs.name,
605 'source_repo': source_vcs.name,
579 'source_ref_name': pull_request.source_ref_parts.name,
606 'source_ref_name': pull_request.source_ref_parts.name,
580 'pr_title': pull_request.title
607 'pr_title': pull_request.title
581 }
608 }
582
609
583 workspace_id = self._workspace_id(pull_request)
610 workspace_id = self._workspace_id(pull_request)
584 use_rebase = self._use_rebase_for_merging(pull_request)
611 use_rebase = self._use_rebase_for_merging(pull_request)
585 close_branch = self._close_branch_before_merging(pull_request)
612 close_branch = self._close_branch_before_merging(pull_request)
586
613
587 callback_daemon, extras = prepare_callback_daemon(
614 callback_daemon, extras = prepare_callback_daemon(
588 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
615 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
589 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
616 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
590
617
591 with callback_daemon:
618 with callback_daemon:
592 # TODO: johbo: Implement a clean way to run a config_override
619 # TODO: johbo: Implement a clean way to run a config_override
593 # for a single call.
620 # for a single call.
594 target_vcs.config.set(
621 target_vcs.config.set(
595 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
622 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
596 merge_state = target_vcs.merge(
623 merge_state = target_vcs.merge(
597 target_ref, source_vcs, pull_request.source_ref_parts,
624 target_ref, source_vcs, pull_request.source_ref_parts,
598 workspace_id, user_name=user.username,
625 workspace_id, user_name=user.username,
599 user_email=user.email, message=message, use_rebase=use_rebase,
626 user_email=user.email, message=message, use_rebase=use_rebase,
600 close_branch=close_branch)
627 close_branch=close_branch)
601 return merge_state
628 return merge_state
602
629
603 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
630 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
604 pull_request.merge_rev = merge_state.merge_ref.commit_id
631 pull_request.merge_rev = merge_state.merge_ref.commit_id
605 pull_request.updated_on = datetime.datetime.now()
632 pull_request.updated_on = datetime.datetime.now()
606 close_msg = close_msg or 'Pull request merged and closed'
633 close_msg = close_msg or 'Pull request merged and closed'
607
634
608 CommentsModel().create(
635 CommentsModel().create(
609 text=safe_unicode(close_msg),
636 text=safe_unicode(close_msg),
610 repo=pull_request.target_repo.repo_id,
637 repo=pull_request.target_repo.repo_id,
611 user=user.user_id,
638 user=user.user_id,
612 pull_request=pull_request.pull_request_id,
639 pull_request=pull_request.pull_request_id,
613 f_path=None,
640 f_path=None,
614 line_no=None,
641 line_no=None,
615 closing_pr=True
642 closing_pr=True
616 )
643 )
617
644
618 Session().add(pull_request)
645 Session().add(pull_request)
619 Session().flush()
646 Session().flush()
620 # TODO: paris: replace invalidation with less radical solution
647 # TODO: paris: replace invalidation with less radical solution
621 ScmModel().mark_for_invalidation(
648 ScmModel().mark_for_invalidation(
622 pull_request.target_repo.repo_name)
649 pull_request.target_repo.repo_name)
623 self._trigger_pull_request_hook(pull_request, user, 'merge')
650 self._trigger_pull_request_hook(pull_request, user, 'merge')
624
651
625 def has_valid_update_type(self, pull_request):
652 def has_valid_update_type(self, pull_request):
626 source_ref_type = pull_request.source_ref_parts.type
653 source_ref_type = pull_request.source_ref_parts.type
627 return source_ref_type in ['book', 'branch', 'tag']
654 return source_ref_type in ['book', 'branch', 'tag']
628
655
629 def update_commits(self, pull_request):
656 def update_commits(self, pull_request):
630 """
657 """
631 Get the updated list of commits for the pull request
658 Get the updated list of commits for the pull request
632 and return the new pull request version and the list
659 and return the new pull request version and the list
633 of commits processed by this update action
660 of commits processed by this update action
634 """
661 """
635 pull_request = self.__get_pull_request(pull_request)
662 pull_request = self.__get_pull_request(pull_request)
636 source_ref_type = pull_request.source_ref_parts.type
663 source_ref_type = pull_request.source_ref_parts.type
637 source_ref_name = pull_request.source_ref_parts.name
664 source_ref_name = pull_request.source_ref_parts.name
638 source_ref_id = pull_request.source_ref_parts.commit_id
665 source_ref_id = pull_request.source_ref_parts.commit_id
639
666
640 target_ref_type = pull_request.target_ref_parts.type
667 target_ref_type = pull_request.target_ref_parts.type
641 target_ref_name = pull_request.target_ref_parts.name
668 target_ref_name = pull_request.target_ref_parts.name
642 target_ref_id = pull_request.target_ref_parts.commit_id
669 target_ref_id = pull_request.target_ref_parts.commit_id
643
670
644 if not self.has_valid_update_type(pull_request):
671 if not self.has_valid_update_type(pull_request):
645 log.debug(
672 log.debug(
646 "Skipping update of pull request %s due to ref type: %s",
673 "Skipping update of pull request %s due to ref type: %s",
647 pull_request, source_ref_type)
674 pull_request, source_ref_type)
648 return UpdateResponse(
675 return UpdateResponse(
649 executed=False,
676 executed=False,
650 reason=UpdateFailureReason.WRONG_REF_TYPE,
677 reason=UpdateFailureReason.WRONG_REF_TYPE,
651 old=pull_request, new=None, changes=None,
678 old=pull_request, new=None, changes=None,
652 source_changed=False, target_changed=False)
679 source_changed=False, target_changed=False)
653
680
654 # source repo
681 # source repo
655 source_repo = pull_request.source_repo.scm_instance()
682 source_repo = pull_request.source_repo.scm_instance()
656 try:
683 try:
657 source_commit = source_repo.get_commit(commit_id=source_ref_name)
684 source_commit = source_repo.get_commit(commit_id=source_ref_name)
658 except CommitDoesNotExistError:
685 except CommitDoesNotExistError:
659 return UpdateResponse(
686 return UpdateResponse(
660 executed=False,
687 executed=False,
661 reason=UpdateFailureReason.MISSING_SOURCE_REF,
688 reason=UpdateFailureReason.MISSING_SOURCE_REF,
662 old=pull_request, new=None, changes=None,
689 old=pull_request, new=None, changes=None,
663 source_changed=False, target_changed=False)
690 source_changed=False, target_changed=False)
664
691
665 source_changed = source_ref_id != source_commit.raw_id
692 source_changed = source_ref_id != source_commit.raw_id
666
693
667 # target repo
694 # target repo
668 target_repo = pull_request.target_repo.scm_instance()
695 target_repo = pull_request.target_repo.scm_instance()
669 try:
696 try:
670 target_commit = target_repo.get_commit(commit_id=target_ref_name)
697 target_commit = target_repo.get_commit(commit_id=target_ref_name)
671 except CommitDoesNotExistError:
698 except CommitDoesNotExistError:
672 return UpdateResponse(
699 return UpdateResponse(
673 executed=False,
700 executed=False,
674 reason=UpdateFailureReason.MISSING_TARGET_REF,
701 reason=UpdateFailureReason.MISSING_TARGET_REF,
675 old=pull_request, new=None, changes=None,
702 old=pull_request, new=None, changes=None,
676 source_changed=False, target_changed=False)
703 source_changed=False, target_changed=False)
677 target_changed = target_ref_id != target_commit.raw_id
704 target_changed = target_ref_id != target_commit.raw_id
678
705
679 if not (source_changed or target_changed):
706 if not (source_changed or target_changed):
680 log.debug("Nothing changed in pull request %s", pull_request)
707 log.debug("Nothing changed in pull request %s", pull_request)
681 return UpdateResponse(
708 return UpdateResponse(
682 executed=False,
709 executed=False,
683 reason=UpdateFailureReason.NO_CHANGE,
710 reason=UpdateFailureReason.NO_CHANGE,
684 old=pull_request, new=None, changes=None,
711 old=pull_request, new=None, changes=None,
685 source_changed=source_changed, target_changed=target_changed)
712 source_changed=source_changed, target_changed=target_changed)
686
713
687 change_in_found = 'target repo' if target_changed else 'source repo'
714 change_in_found = 'target repo' if target_changed else 'source repo'
688 log.debug('Updating pull request because of change in %s detected',
715 log.debug('Updating pull request because of change in %s detected',
689 change_in_found)
716 change_in_found)
690
717
691 # Finally there is a need for an update, in case of source change
718 # Finally there is a need for an update, in case of source change
692 # we create a new version, else just an update
719 # we create a new version, else just an update
693 if source_changed:
720 if source_changed:
694 pull_request_version = self._create_version_from_snapshot(pull_request)
721 pull_request_version = self._create_version_from_snapshot(pull_request)
695 self._link_comments_to_version(pull_request_version)
722 self._link_comments_to_version(pull_request_version)
696 else:
723 else:
697 try:
724 try:
698 ver = pull_request.versions[-1]
725 ver = pull_request.versions[-1]
699 except IndexError:
726 except IndexError:
700 ver = None
727 ver = None
701
728
702 pull_request.pull_request_version_id = \
729 pull_request.pull_request_version_id = \
703 ver.pull_request_version_id if ver else None
730 ver.pull_request_version_id if ver else None
704 pull_request_version = pull_request
731 pull_request_version = pull_request
705
732
706 try:
733 try:
707 if target_ref_type in ('tag', 'branch', 'book'):
734 if target_ref_type in ('tag', 'branch', 'book'):
708 target_commit = target_repo.get_commit(target_ref_name)
735 target_commit = target_repo.get_commit(target_ref_name)
709 else:
736 else:
710 target_commit = target_repo.get_commit(target_ref_id)
737 target_commit = target_repo.get_commit(target_ref_id)
711 except CommitDoesNotExistError:
738 except CommitDoesNotExistError:
712 return UpdateResponse(
739 return UpdateResponse(
713 executed=False,
740 executed=False,
714 reason=UpdateFailureReason.MISSING_TARGET_REF,
741 reason=UpdateFailureReason.MISSING_TARGET_REF,
715 old=pull_request, new=None, changes=None,
742 old=pull_request, new=None, changes=None,
716 source_changed=source_changed, target_changed=target_changed)
743 source_changed=source_changed, target_changed=target_changed)
717
744
718 # re-compute commit ids
745 # re-compute commit ids
719 old_commit_ids = pull_request.revisions
746 old_commit_ids = pull_request.revisions
720 pre_load = ["author", "branch", "date", "message"]
747 pre_load = ["author", "branch", "date", "message"]
721 commit_ranges = target_repo.compare(
748 commit_ranges = target_repo.compare(
722 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
749 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
723 pre_load=pre_load)
750 pre_load=pre_load)
724
751
725 ancestor = target_repo.get_common_ancestor(
752 ancestor = target_repo.get_common_ancestor(
726 target_commit.raw_id, source_commit.raw_id, source_repo)
753 target_commit.raw_id, source_commit.raw_id, source_repo)
727
754
728 pull_request.source_ref = '%s:%s:%s' % (
755 pull_request.source_ref = '%s:%s:%s' % (
729 source_ref_type, source_ref_name, source_commit.raw_id)
756 source_ref_type, source_ref_name, source_commit.raw_id)
730 pull_request.target_ref = '%s:%s:%s' % (
757 pull_request.target_ref = '%s:%s:%s' % (
731 target_ref_type, target_ref_name, ancestor)
758 target_ref_type, target_ref_name, ancestor)
732
759
733 pull_request.revisions = [
760 pull_request.revisions = [
734 commit.raw_id for commit in reversed(commit_ranges)]
761 commit.raw_id for commit in reversed(commit_ranges)]
735 pull_request.updated_on = datetime.datetime.now()
762 pull_request.updated_on = datetime.datetime.now()
736 Session().add(pull_request)
763 Session().add(pull_request)
737 new_commit_ids = pull_request.revisions
764 new_commit_ids = pull_request.revisions
738
765
739 old_diff_data, new_diff_data = self._generate_update_diffs(
766 old_diff_data, new_diff_data = self._generate_update_diffs(
740 pull_request, pull_request_version)
767 pull_request, pull_request_version)
741
768
742 # calculate commit and file changes
769 # calculate commit and file changes
743 changes = self._calculate_commit_id_changes(
770 changes = self._calculate_commit_id_changes(
744 old_commit_ids, new_commit_ids)
771 old_commit_ids, new_commit_ids)
745 file_changes = self._calculate_file_changes(
772 file_changes = self._calculate_file_changes(
746 old_diff_data, new_diff_data)
773 old_diff_data, new_diff_data)
747
774
748 # set comments as outdated if DIFFS changed
775 # set comments as outdated if DIFFS changed
749 CommentsModel().outdate_comments(
776 CommentsModel().outdate_comments(
750 pull_request, old_diff_data=old_diff_data,
777 pull_request, old_diff_data=old_diff_data,
751 new_diff_data=new_diff_data)
778 new_diff_data=new_diff_data)
752
779
753 commit_changes = (changes.added or changes.removed)
780 commit_changes = (changes.added or changes.removed)
754 file_node_changes = (
781 file_node_changes = (
755 file_changes.added or file_changes.modified or file_changes.removed)
782 file_changes.added or file_changes.modified or file_changes.removed)
756 pr_has_changes = commit_changes or file_node_changes
783 pr_has_changes = commit_changes or file_node_changes
757
784
758 # Add an automatic comment to the pull request, in case
785 # Add an automatic comment to the pull request, in case
759 # anything has changed
786 # anything has changed
760 if pr_has_changes:
787 if pr_has_changes:
761 update_comment = CommentsModel().create(
788 update_comment = CommentsModel().create(
762 text=self._render_update_message(changes, file_changes),
789 text=self._render_update_message(changes, file_changes),
763 repo=pull_request.target_repo,
790 repo=pull_request.target_repo,
764 user=pull_request.author,
791 user=pull_request.author,
765 pull_request=pull_request,
792 pull_request=pull_request,
766 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
793 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
767
794
768 # Update status to "Under Review" for added commits
795 # Update status to "Under Review" for added commits
769 for commit_id in changes.added:
796 for commit_id in changes.added:
770 ChangesetStatusModel().set_status(
797 ChangesetStatusModel().set_status(
771 repo=pull_request.source_repo,
798 repo=pull_request.source_repo,
772 status=ChangesetStatus.STATUS_UNDER_REVIEW,
799 status=ChangesetStatus.STATUS_UNDER_REVIEW,
773 comment=update_comment,
800 comment=update_comment,
774 user=pull_request.author,
801 user=pull_request.author,
775 pull_request=pull_request,
802 pull_request=pull_request,
776 revision=commit_id)
803 revision=commit_id)
777
804
778 log.debug(
805 log.debug(
779 'Updated pull request %s, added_ids: %s, common_ids: %s, '
806 'Updated pull request %s, added_ids: %s, common_ids: %s, '
780 'removed_ids: %s', pull_request.pull_request_id,
807 'removed_ids: %s', pull_request.pull_request_id,
781 changes.added, changes.common, changes.removed)
808 changes.added, changes.common, changes.removed)
782 log.debug(
809 log.debug(
783 'Updated pull request with the following file changes: %s',
810 'Updated pull request with the following file changes: %s',
784 file_changes)
811 file_changes)
785
812
786 log.info(
813 log.info(
787 "Updated pull request %s from commit %s to commit %s, "
814 "Updated pull request %s from commit %s to commit %s, "
788 "stored new version %s of this pull request.",
815 "stored new version %s of this pull request.",
789 pull_request.pull_request_id, source_ref_id,
816 pull_request.pull_request_id, source_ref_id,
790 pull_request.source_ref_parts.commit_id,
817 pull_request.source_ref_parts.commit_id,
791 pull_request_version.pull_request_version_id)
818 pull_request_version.pull_request_version_id)
792 Session().commit()
819 Session().commit()
793 self._trigger_pull_request_hook(
820 self._trigger_pull_request_hook(
794 pull_request, pull_request.author, 'update')
821 pull_request, pull_request.author, 'update')
795
822
796 return UpdateResponse(
823 return UpdateResponse(
797 executed=True, reason=UpdateFailureReason.NONE,
824 executed=True, reason=UpdateFailureReason.NONE,
798 old=pull_request, new=pull_request_version, changes=changes,
825 old=pull_request, new=pull_request_version, changes=changes,
799 source_changed=source_changed, target_changed=target_changed)
826 source_changed=source_changed, target_changed=target_changed)
800
827
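# A minimal sketch of how a caller might inspect the response returned above;
# the handling branches are illustrative only, but ``executed``, ``reason``,
# ``new`` and ``changes`` are the attributes update_commits() actually returns:
#
#   response = PullRequestModel().update_commits(pull_request)
#   if response.executed:
#       new_version = response.new          # stored version (or the PR itself)
#       added, removed = response.changes.added, response.changes.removed
#   elif response.reason == UpdateFailureReason.NO_CHANGE:
#       pass                                # nothing to update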
801 def _create_version_from_snapshot(self, pull_request):
828 def _create_version_from_snapshot(self, pull_request):
802 version = PullRequestVersion()
829 version = PullRequestVersion()
803 version.title = pull_request.title
830 version.title = pull_request.title
804 version.description = pull_request.description
831 version.description = pull_request.description
805 version.status = pull_request.status
832 version.status = pull_request.status
806 version.created_on = datetime.datetime.now()
833 version.created_on = datetime.datetime.now()
807 version.updated_on = pull_request.updated_on
834 version.updated_on = pull_request.updated_on
808 version.user_id = pull_request.user_id
835 version.user_id = pull_request.user_id
809 version.source_repo = pull_request.source_repo
836 version.source_repo = pull_request.source_repo
810 version.source_ref = pull_request.source_ref
837 version.source_ref = pull_request.source_ref
811 version.target_repo = pull_request.target_repo
838 version.target_repo = pull_request.target_repo
812 version.target_ref = pull_request.target_ref
839 version.target_ref = pull_request.target_ref
813
840
814 version._last_merge_source_rev = pull_request._last_merge_source_rev
841 version._last_merge_source_rev = pull_request._last_merge_source_rev
815 version._last_merge_target_rev = pull_request._last_merge_target_rev
842 version._last_merge_target_rev = pull_request._last_merge_target_rev
816 version.last_merge_status = pull_request.last_merge_status
843 version.last_merge_status = pull_request.last_merge_status
817 version.shadow_merge_ref = pull_request.shadow_merge_ref
844 version.shadow_merge_ref = pull_request.shadow_merge_ref
818 version.merge_rev = pull_request.merge_rev
845 version.merge_rev = pull_request.merge_rev
819 version.reviewer_data = pull_request.reviewer_data
846 version.reviewer_data = pull_request.reviewer_data
820
847
821 version.revisions = pull_request.revisions
848 version.revisions = pull_request.revisions
822 version.pull_request = pull_request
849 version.pull_request = pull_request
823 Session().add(version)
850 Session().add(version)
824 Session().flush()
851 Session().flush()
825
852
826 return version
853 return version
827
854
828 def _generate_update_diffs(self, pull_request, pull_request_version):
855 def _generate_update_diffs(self, pull_request, pull_request_version):
829
856
830 diff_context = (
857 diff_context = (
831 self.DIFF_CONTEXT +
858 self.DIFF_CONTEXT +
832 CommentsModel.needed_extra_diff_context())
859 CommentsModel.needed_extra_diff_context())
833
860
834 source_repo = pull_request_version.source_repo
861 source_repo = pull_request_version.source_repo
835 source_ref_id = pull_request_version.source_ref_parts.commit_id
862 source_ref_id = pull_request_version.source_ref_parts.commit_id
836 target_ref_id = pull_request_version.target_ref_parts.commit_id
863 target_ref_id = pull_request_version.target_ref_parts.commit_id
837 old_diff = self._get_diff_from_pr_or_version(
864 old_diff = self._get_diff_from_pr_or_version(
838 source_repo, source_ref_id, target_ref_id, context=diff_context)
865 source_repo, source_ref_id, target_ref_id, context=diff_context)
839
866
840 source_repo = pull_request.source_repo
867 source_repo = pull_request.source_repo
841 source_ref_id = pull_request.source_ref_parts.commit_id
868 source_ref_id = pull_request.source_ref_parts.commit_id
842 target_ref_id = pull_request.target_ref_parts.commit_id
869 target_ref_id = pull_request.target_ref_parts.commit_id
843
870
844 new_diff = self._get_diff_from_pr_or_version(
871 new_diff = self._get_diff_from_pr_or_version(
845 source_repo, source_ref_id, target_ref_id, context=diff_context)
872 source_repo, source_ref_id, target_ref_id, context=diff_context)
846
873
847 old_diff_data = diffs.DiffProcessor(old_diff)
874 old_diff_data = diffs.DiffProcessor(old_diff)
848 old_diff_data.prepare()
875 old_diff_data.prepare()
849 new_diff_data = diffs.DiffProcessor(new_diff)
876 new_diff_data = diffs.DiffProcessor(new_diff)
850 new_diff_data.prepare()
877 new_diff_data.prepare()
851
878
852 return old_diff_data, new_diff_data
879 return old_diff_data, new_diff_data
853
880
854 def _link_comments_to_version(self, pull_request_version):
881 def _link_comments_to_version(self, pull_request_version):
855 """
882 """
856 Link all unlinked comments of this pull request to the given version.
883 Link all unlinked comments of this pull request to the given version.
857
884
858 :param pull_request_version: The `PullRequestVersion` to which
885 :param pull_request_version: The `PullRequestVersion` to which
859 the comments shall be linked.
886 the comments shall be linked.
860
887
861 """
888 """
862 pull_request = pull_request_version.pull_request
889 pull_request = pull_request_version.pull_request
863 comments = ChangesetComment.query()\
890 comments = ChangesetComment.query()\
864 .filter(
891 .filter(
865 # TODO: johbo: Should we query for the repo at all here?
892 # TODO: johbo: Should we query for the repo at all here?
866 # Pending decision on how comments of PRs are to be related
893 # Pending decision on how comments of PRs are to be related
867 # to either the source repo, the target repo or no repo at all.
894 # to either the source repo, the target repo or no repo at all.
868 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
895 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
869 ChangesetComment.pull_request == pull_request,
896 ChangesetComment.pull_request == pull_request,
870 ChangesetComment.pull_request_version == None)\
897 ChangesetComment.pull_request_version == None)\
871 .order_by(ChangesetComment.comment_id.asc())
898 .order_by(ChangesetComment.comment_id.asc())
872
899
873 # TODO: johbo: Find out why this breaks if it is done in a bulk
900 # TODO: johbo: Find out why this breaks if it is done in a bulk
874 # operation.
901 # operation.
875 for comment in comments:
902 for comment in comments:
876 comment.pull_request_version_id = (
903 comment.pull_request_version_id = (
877 pull_request_version.pull_request_version_id)
904 pull_request_version.pull_request_version_id)
878 Session().add(comment)
905 Session().add(comment)
879
906
880 def _calculate_commit_id_changes(self, old_ids, new_ids):
907 def _calculate_commit_id_changes(self, old_ids, new_ids):
881 added = [x for x in new_ids if x not in old_ids]
908 added = [x for x in new_ids if x not in old_ids]
882 common = [x for x in new_ids if x in old_ids]
909 common = [x for x in new_ids if x in old_ids]
883 removed = [x for x in old_ids if x not in new_ids]
910 removed = [x for x in old_ids if x not in new_ids]
884 total = new_ids
911 total = new_ids
885 return ChangeTuple(added, common, removed, total)
912 return ChangeTuple(added, common, removed, total)
886
913
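# Illustrative example (commit ids are hypothetical); assuming ChangeTuple is
# the named tuple built from (added, common, removed, total) as above:
#
#   old_ids = ['aaa1', 'bbb2', 'ccc3']
#   new_ids = ['bbb2', 'ccc3', 'ddd4']
#   _calculate_commit_id_changes(old_ids, new_ids)
#   -> added=['ddd4'], common=['bbb2', 'ccc3'], removed=['aaa1'], total=new_ids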
887 def _calculate_file_changes(self, old_diff_data, new_diff_data):
914 def _calculate_file_changes(self, old_diff_data, new_diff_data):
888
915
889 old_files = OrderedDict()
916 old_files = OrderedDict()
890 for diff_data in old_diff_data.parsed_diff:
917 for diff_data in old_diff_data.parsed_diff:
891 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
918 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
892
919
893 added_files = []
920 added_files = []
894 modified_files = []
921 modified_files = []
895 removed_files = []
922 removed_files = []
896 for diff_data in new_diff_data.parsed_diff:
923 for diff_data in new_diff_data.parsed_diff:
897 new_filename = diff_data['filename']
924 new_filename = diff_data['filename']
898 new_hash = md5_safe(diff_data['raw_diff'])
925 new_hash = md5_safe(diff_data['raw_diff'])
899
926
900 old_hash = old_files.get(new_filename)
927 old_hash = old_files.get(new_filename)
901 if not old_hash:
928 if not old_hash:
902 # file is not present in the old diff, which means it was added
929 # file is not present in the old diff, which means it was added
903 added_files.append(new_filename)
930 added_files.append(new_filename)
904 else:
931 else:
905 if new_hash != old_hash:
932 if new_hash != old_hash:
906 modified_files.append(new_filename)
933 modified_files.append(new_filename)
907 # now remove a file from old, since we have seen it already
934 # now remove a file from old, since we have seen it already
908 del old_files[new_filename]
935 del old_files[new_filename]
909
936
910 # removed files are those present in old, but not in NEW;
937 # removed files are those present in old, but not in NEW;
911 # since we remove old files that are present in the new diff, any
938 # since we remove old files that are present in the new diff, any
912 # left-overs are the removed files
939 # left-overs are the removed files
913 removed_files.extend(old_files.keys())
940 removed_files.extend(old_files.keys())
914
941
915 return FileChangeTuple(added_files, modified_files, removed_files)
942 return FileChangeTuple(added_files, modified_files, removed_files)
916
943
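# Sketch of the classification above, with hypothetical file names and md5
# hashes of their raw diffs: a file absent from the old diff is "added", a
# file whose hash changed is "modified", and old files never matched against
# the new diff end up in "removed":
#
#   old diff hashes: {'setup.py': 'aaa', 'README.rst': 'bbb'}
#   new diff hashes: {'setup.py': 'ccc', 'docs/index.rst': 'ddd'}
#   -> FileChangeTuple(added=['docs/index.rst'],
#                      modified=['setup.py'],
#                      removed=['README.rst'])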
917 def _render_update_message(self, changes, file_changes):
944 def _render_update_message(self, changes, file_changes):
918 """
945 """
919 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
946 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
920 so it always looks the same regardless of which default
947 so it always looks the same regardless of which default
921 renderer the system is using.
948 renderer the system is using.
922
949
923 :param changes: changes named tuple
950 :param changes: changes named tuple
924 :param file_changes: file changes named tuple
951 :param file_changes: file changes named tuple
925
952
926 """
953 """
927 new_status = ChangesetStatus.get_status_lbl(
954 new_status = ChangesetStatus.get_status_lbl(
928 ChangesetStatus.STATUS_UNDER_REVIEW)
955 ChangesetStatus.STATUS_UNDER_REVIEW)
929
956
930 changed_files = (
957 changed_files = (
931 file_changes.added + file_changes.modified + file_changes.removed)
958 file_changes.added + file_changes.modified + file_changes.removed)
932
959
933 params = {
960 params = {
934 'under_review_label': new_status,
961 'under_review_label': new_status,
935 'added_commits': changes.added,
962 'added_commits': changes.added,
936 'removed_commits': changes.removed,
963 'removed_commits': changes.removed,
937 'changed_files': changed_files,
964 'changed_files': changed_files,
938 'added_files': file_changes.added,
965 'added_files': file_changes.added,
939 'modified_files': file_changes.modified,
966 'modified_files': file_changes.modified,
940 'removed_files': file_changes.removed,
967 'removed_files': file_changes.removed,
941 }
968 }
942 renderer = RstTemplateRenderer()
969 renderer = RstTemplateRenderer()
943 return renderer.render('pull_request_update.mako', **params)
970 return renderer.render('pull_request_update.mako', **params)
944
971
945 def edit(self, pull_request, title, description, user):
972 def edit(self, pull_request, title, description, user):
946 pull_request = self.__get_pull_request(pull_request)
973 pull_request = self.__get_pull_request(pull_request)
947 old_data = pull_request.get_api_data(with_merge_state=False)
974 old_data = pull_request.get_api_data(with_merge_state=False)
948 if pull_request.is_closed():
975 if pull_request.is_closed():
949 raise ValueError('This pull request is closed')
976 raise ValueError('This pull request is closed')
950 if title:
977 if title:
951 pull_request.title = title
978 pull_request.title = title
952 pull_request.description = description
979 pull_request.description = description
953 pull_request.updated_on = datetime.datetime.now()
980 pull_request.updated_on = datetime.datetime.now()
954 Session().add(pull_request)
981 Session().add(pull_request)
955 self._log_audit_action(
982 self._log_audit_action(
956 'repo.pull_request.edit', {'old_data': old_data},
983 'repo.pull_request.edit', {'old_data': old_data},
957 user, pull_request)
984 user, pull_request)
958
985
959 def update_reviewers(self, pull_request, reviewer_data, user):
986 def update_reviewers(self, pull_request, reviewer_data, user):
960 """
987 """
961 Update the reviewers in the pull request
988 Update the reviewers in the pull request
962
989
963 :param pull_request: the pr to update
990 :param pull_request: the pr to update
964 :param reviewer_data: list of tuples
991 :param reviewer_data: list of tuples
965 [(user, ['reason1', 'reason2'], mandatory_flag)]
992 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
966 """
993 """
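# Example shape of ``reviewer_data`` in the new 4-element format described
# above; the user ids, reasons and rule entries are hypothetical, and, as the
# loop below shows, the rules element is unpacked but not stored here:
#
#   reviewer_data = [
#       (2, ['Repository owner'], True, []),
#       ('john', ['Default reviewer'], False, ['<rule id or payload>']),
#   ]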
967 pull_request = self.__get_pull_request(pull_request)
994 pull_request = self.__get_pull_request(pull_request)
968 if pull_request.is_closed():
995 if pull_request.is_closed():
969 raise ValueError('This pull request is closed')
996 raise ValueError('This pull request is closed')
970
997
971 reviewers = {}
998 reviewers = {}
972 for user_id, reasons, mandatory in reviewer_data:
999 for user_id, reasons, mandatory, rules in reviewer_data:
973 if isinstance(user_id, (int, basestring)):
1000 if isinstance(user_id, (int, basestring)):
974 user_id = self._get_user(user_id).user_id
1001 user_id = self._get_user(user_id).user_id
975 reviewers[user_id] = {
1002 reviewers[user_id] = {
976 'reasons': reasons, 'mandatory': mandatory}
1003 'reasons': reasons, 'mandatory': mandatory}
977
1004
978 reviewers_ids = set(reviewers.keys())
1005 reviewers_ids = set(reviewers.keys())
979 current_reviewers = PullRequestReviewers.query()\
1006 current_reviewers = PullRequestReviewers.query()\
980 .filter(PullRequestReviewers.pull_request ==
1007 .filter(PullRequestReviewers.pull_request ==
981 pull_request).all()
1008 pull_request).all()
982 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1009 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
983
1010
984 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1011 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
985 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1012 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
986
1013
987 log.debug("Adding %s reviewers", ids_to_add)
1014 log.debug("Adding %s reviewers", ids_to_add)
988 log.debug("Removing %s reviewers", ids_to_remove)
1015 log.debug("Removing %s reviewers", ids_to_remove)
989 changed = False
1016 changed = False
990 for uid in ids_to_add:
1017 for uid in ids_to_add:
991 changed = True
1018 changed = True
992 _usr = self._get_user(uid)
1019 _usr = self._get_user(uid)
993 reviewer = PullRequestReviewers()
1020 reviewer = PullRequestReviewers()
994 reviewer.user = _usr
1021 reviewer.user = _usr
995 reviewer.pull_request = pull_request
1022 reviewer.pull_request = pull_request
996 reviewer.reasons = reviewers[uid]['reasons']
1023 reviewer.reasons = reviewers[uid]['reasons']
997 # NOTE(marcink): mandatory shouldn't be changed now
1024 # NOTE(marcink): mandatory shouldn't be changed now
998 # reviewer.mandatory = reviewers[uid]['reasons']
1025 # reviewer.mandatory = reviewers[uid]['reasons']
999 Session().add(reviewer)
1026 Session().add(reviewer)
1000 self._log_audit_action(
1027 self._log_audit_action(
1001 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1028 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1002 user, pull_request)
1029 user, pull_request)
1003
1030
1004 for uid in ids_to_remove:
1031 for uid in ids_to_remove:
1005 changed = True
1032 changed = True
1006 reviewers = PullRequestReviewers.query()\
1033 reviewers = PullRequestReviewers.query()\
1007 .filter(PullRequestReviewers.user_id == uid,
1034 .filter(PullRequestReviewers.user_id == uid,
1008 PullRequestReviewers.pull_request == pull_request)\
1035 PullRequestReviewers.pull_request == pull_request)\
1009 .all()
1036 .all()
1010 # use .all() in case we accidentally added the same person twice
1037 # use .all() in case we accidentally added the same person twice
1011 # this CAN happen due to the lack of DB checks
1038 # this CAN happen due to the lack of DB checks
1012 for obj in reviewers:
1039 for obj in reviewers:
1013 old_data = obj.get_dict()
1040 old_data = obj.get_dict()
1014 Session().delete(obj)
1041 Session().delete(obj)
1015 self._log_audit_action(
1042 self._log_audit_action(
1016 'repo.pull_request.reviewer.delete',
1043 'repo.pull_request.reviewer.delete',
1017 {'old_data': old_data}, user, pull_request)
1044 {'old_data': old_data}, user, pull_request)
1018
1045
1019 if changed:
1046 if changed:
1020 pull_request.updated_on = datetime.datetime.now()
1047 pull_request.updated_on = datetime.datetime.now()
1021 Session().add(pull_request)
1048 Session().add(pull_request)
1022
1049
1023 self.notify_reviewers(pull_request, ids_to_add)
1050 self.notify_reviewers(pull_request, ids_to_add)
1024 return ids_to_add, ids_to_remove
1051 return ids_to_add, ids_to_remove
1025
1052
1026 def get_url(self, pull_request, request=None, permalink=False):
1053 def get_url(self, pull_request, request=None, permalink=False):
1027 if not request:
1054 if not request:
1028 request = get_current_request()
1055 request = get_current_request()
1029
1056
1030 if permalink:
1057 if permalink:
1031 return request.route_url(
1058 return request.route_url(
1032 'pull_requests_global',
1059 'pull_requests_global',
1033 pull_request_id=pull_request.pull_request_id,)
1060 pull_request_id=pull_request.pull_request_id,)
1034 else:
1061 else:
1035 return request.route_url('pullrequest_show',
1062 return request.route_url('pullrequest_show',
1036 repo_name=safe_str(pull_request.target_repo.repo_name),
1063 repo_name=safe_str(pull_request.target_repo.repo_name),
1037 pull_request_id=pull_request.pull_request_id,)
1064 pull_request_id=pull_request.pull_request_id,)
1038
1065
1039 def get_shadow_clone_url(self, pull_request):
1066 def get_shadow_clone_url(self, pull_request):
1040 """
1067 """
1041 Returns a qualified url pointing to the shadow repository. If this pull
1068 Returns a qualified url pointing to the shadow repository. If this pull
1042 request is closed there is no shadow repository and ``None`` will be
1069 request is closed there is no shadow repository and ``None`` will be
1043 returned.
1070 returned.
1044 """
1071 """
1045 if pull_request.is_closed():
1072 if pull_request.is_closed():
1046 return None
1073 return None
1047 else:
1074 else:
1048 pr_url = urllib.unquote(self.get_url(pull_request))
1075 pr_url = urllib.unquote(self.get_url(pull_request))
1049 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1076 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1050
1077
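# For illustration only (host, repository name and route layout are
# hypothetical): if the pull request page lives at
#   https://code.example.com/myrepo/pull-request/42
# then the shadow clone url returned above is simply that url with
# '/repository' appended:
#   https://code.example.com/myrepo/pull-request/42/repository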
1051 def notify_reviewers(self, pull_request, reviewers_ids):
1078 def notify_reviewers(self, pull_request, reviewers_ids):
1052 # notification to reviewers
1079 # notification to reviewers
1053 if not reviewers_ids:
1080 if not reviewers_ids:
1054 return
1081 return
1055
1082
1056 pull_request_obj = pull_request
1083 pull_request_obj = pull_request
1057 # get the current participants of this pull request
1084 # get the current participants of this pull request
1058 recipients = reviewers_ids
1085 recipients = reviewers_ids
1059 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1086 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1060
1087
1061 pr_source_repo = pull_request_obj.source_repo
1088 pr_source_repo = pull_request_obj.source_repo
1062 pr_target_repo = pull_request_obj.target_repo
1089 pr_target_repo = pull_request_obj.target_repo
1063
1090
1064 pr_url = h.route_url('pullrequest_show',
1091 pr_url = h.route_url('pullrequest_show',
1065 repo_name=pr_target_repo.repo_name,
1092 repo_name=pr_target_repo.repo_name,
1066 pull_request_id=pull_request_obj.pull_request_id,)
1093 pull_request_id=pull_request_obj.pull_request_id,)
1067
1094
1068 # set some variables for email notification
1095 # set some variables for email notification
1069 pr_target_repo_url = h.route_url(
1096 pr_target_repo_url = h.route_url(
1070 'repo_summary', repo_name=pr_target_repo.repo_name)
1097 'repo_summary', repo_name=pr_target_repo.repo_name)
1071
1098
1072 pr_source_repo_url = h.route_url(
1099 pr_source_repo_url = h.route_url(
1073 'repo_summary', repo_name=pr_source_repo.repo_name)
1100 'repo_summary', repo_name=pr_source_repo.repo_name)
1074
1101
1075 # pull request specifics
1102 # pull request specifics
1076 pull_request_commits = [
1103 pull_request_commits = [
1077 (x.raw_id, x.message)
1104 (x.raw_id, x.message)
1078 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1105 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1079
1106
1080 kwargs = {
1107 kwargs = {
1081 'user': pull_request.author,
1108 'user': pull_request.author,
1082 'pull_request': pull_request_obj,
1109 'pull_request': pull_request_obj,
1083 'pull_request_commits': pull_request_commits,
1110 'pull_request_commits': pull_request_commits,
1084
1111
1085 'pull_request_target_repo': pr_target_repo,
1112 'pull_request_target_repo': pr_target_repo,
1086 'pull_request_target_repo_url': pr_target_repo_url,
1113 'pull_request_target_repo_url': pr_target_repo_url,
1087
1114
1088 'pull_request_source_repo': pr_source_repo,
1115 'pull_request_source_repo': pr_source_repo,
1089 'pull_request_source_repo_url': pr_source_repo_url,
1116 'pull_request_source_repo_url': pr_source_repo_url,
1090
1117
1091 'pull_request_url': pr_url,
1118 'pull_request_url': pr_url,
1092 }
1119 }
1093
1120
1094 # pre-generate the subject for notification itself
1121 # pre-generate the subject for notification itself
1095 (subject,
1122 (subject,
1096 _h, _e, # we don't care about those
1123 _h, _e, # we don't care about those
1097 body_plaintext) = EmailNotificationModel().render_email(
1124 body_plaintext) = EmailNotificationModel().render_email(
1098 notification_type, **kwargs)
1125 notification_type, **kwargs)
1099
1126
1100 # create notification objects, and emails
1127 # create notification objects, and emails
1101 NotificationModel().create(
1128 NotificationModel().create(
1102 created_by=pull_request.author,
1129 created_by=pull_request.author,
1103 notification_subject=subject,
1130 notification_subject=subject,
1104 notification_body=body_plaintext,
1131 notification_body=body_plaintext,
1105 notification_type=notification_type,
1132 notification_type=notification_type,
1106 recipients=recipients,
1133 recipients=recipients,
1107 email_kwargs=kwargs,
1134 email_kwargs=kwargs,
1108 )
1135 )
1109
1136
1110 def delete(self, pull_request, user):
1137 def delete(self, pull_request, user):
1111 pull_request = self.__get_pull_request(pull_request)
1138 pull_request = self.__get_pull_request(pull_request)
1112 old_data = pull_request.get_api_data(with_merge_state=False)
1139 old_data = pull_request.get_api_data(with_merge_state=False)
1113 self._cleanup_merge_workspace(pull_request)
1140 self._cleanup_merge_workspace(pull_request)
1114 self._log_audit_action(
1141 self._log_audit_action(
1115 'repo.pull_request.delete', {'old_data': old_data},
1142 'repo.pull_request.delete', {'old_data': old_data},
1116 user, pull_request)
1143 user, pull_request)
1117 Session().delete(pull_request)
1144 Session().delete(pull_request)
1118
1145
1119 def close_pull_request(self, pull_request, user):
1146 def close_pull_request(self, pull_request, user):
1120 pull_request = self.__get_pull_request(pull_request)
1147 pull_request = self.__get_pull_request(pull_request)
1121 self._cleanup_merge_workspace(pull_request)
1148 self._cleanup_merge_workspace(pull_request)
1122 pull_request.status = PullRequest.STATUS_CLOSED
1149 pull_request.status = PullRequest.STATUS_CLOSED
1123 pull_request.updated_on = datetime.datetime.now()
1150 pull_request.updated_on = datetime.datetime.now()
1124 Session().add(pull_request)
1151 Session().add(pull_request)
1125 self._trigger_pull_request_hook(
1152 self._trigger_pull_request_hook(
1126 pull_request, pull_request.author, 'close')
1153 pull_request, pull_request.author, 'close')
1127
1154
1128 pr_data = pull_request.get_api_data(with_merge_state=False)
1155 pr_data = pull_request.get_api_data(with_merge_state=False)
1129 self._log_audit_action(
1156 self._log_audit_action(
1130 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1157 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1131
1158
1132 def close_pull_request_with_comment(
1159 def close_pull_request_with_comment(
1133 self, pull_request, user, repo, message=None):
1160 self, pull_request, user, repo, message=None):
1134
1161
1135 pull_request_review_status = pull_request.calculated_review_status()
1162 pull_request_review_status = pull_request.calculated_review_status()
1136
1163
1137 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1164 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1138 # approved only if we have voting consent
1165 # approved only if we have voting consent
1139 status = ChangesetStatus.STATUS_APPROVED
1166 status = ChangesetStatus.STATUS_APPROVED
1140 else:
1167 else:
1141 status = ChangesetStatus.STATUS_REJECTED
1168 status = ChangesetStatus.STATUS_REJECTED
1142 status_lbl = ChangesetStatus.get_status_lbl(status)
1169 status_lbl = ChangesetStatus.get_status_lbl(status)
1143
1170
1144 default_message = (
1171 default_message = (
1145 'Closing with status change {transition_icon} {status}.'
1172 'Closing with status change {transition_icon} {status}.'
1146 ).format(transition_icon='>', status=status_lbl)
1173 ).format(transition_icon='>', status=status_lbl)
1147 text = message or default_message
1174 text = message or default_message
1148
1175
1149 # create a comment, and link it to new status
1176 # create a comment, and link it to new status
1150 comment = CommentsModel().create(
1177 comment = CommentsModel().create(
1151 text=text,
1178 text=text,
1152 repo=repo.repo_id,
1179 repo=repo.repo_id,
1153 user=user.user_id,
1180 user=user.user_id,
1154 pull_request=pull_request.pull_request_id,
1181 pull_request=pull_request.pull_request_id,
1155 status_change=status_lbl,
1182 status_change=status_lbl,
1156 status_change_type=status,
1183 status_change_type=status,
1157 closing_pr=True
1184 closing_pr=True
1158 )
1185 )
1159
1186
1160 # calculate old status before we change it
1187 # calculate old status before we change it
1161 old_calculated_status = pull_request.calculated_review_status()
1188 old_calculated_status = pull_request.calculated_review_status()
1162 ChangesetStatusModel().set_status(
1189 ChangesetStatusModel().set_status(
1163 repo.repo_id,
1190 repo.repo_id,
1164 status,
1191 status,
1165 user.user_id,
1192 user.user_id,
1166 comment=comment,
1193 comment=comment,
1167 pull_request=pull_request.pull_request_id
1194 pull_request=pull_request.pull_request_id
1168 )
1195 )
1169
1196
1170 Session().flush()
1197 Session().flush()
1171 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1198 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1172 # we now calculate the status of pull request again, and based on that
1199 # we now calculate the status of pull request again, and based on that
1173 # calculation trigger a status change. This might happen in cases
1200 # calculation trigger a status change. This might happen in cases
1174 # where a non-reviewer admin closes a pr, which means their vote doesn't
1201 # where a non-reviewer admin closes a pr, which means their vote doesn't
1175 # change the status, while if they are a reviewer it might change it.
1202 # change the status, while if they are a reviewer it might change it.
1176 calculated_status = pull_request.calculated_review_status()
1203 calculated_status = pull_request.calculated_review_status()
1177 if old_calculated_status != calculated_status:
1204 if old_calculated_status != calculated_status:
1178 self._trigger_pull_request_hook(
1205 self._trigger_pull_request_hook(
1179 pull_request, user, 'review_status_change')
1206 pull_request, user, 'review_status_change')
1180
1207
1181 # finally close the PR
1208 # finally close the PR
1182 PullRequestModel().close_pull_request(
1209 PullRequestModel().close_pull_request(
1183 pull_request.pull_request_id, user)
1210 pull_request.pull_request_id, user)
1184
1211
1185 return comment, status
1212 return comment, status
1186
1213
1187 def merge_status(self, pull_request, translator=None):
1214 def merge_status(self, pull_request, translator=None):
1188 _ = translator or get_current_request().translate
1215 _ = translator or get_current_request().translate
1189
1216
1190 if not self._is_merge_enabled(pull_request):
1217 if not self._is_merge_enabled(pull_request):
1191 return False, _('Server-side pull request merging is disabled.')
1218 return False, _('Server-side pull request merging is disabled.')
1192 if pull_request.is_closed():
1219 if pull_request.is_closed():
1193 return False, _('This pull request is closed.')
1220 return False, _('This pull request is closed.')
1194 merge_possible, msg = self._check_repo_requirements(
1221 merge_possible, msg = self._check_repo_requirements(
1195 target=pull_request.target_repo, source=pull_request.source_repo,
1222 target=pull_request.target_repo, source=pull_request.source_repo,
1196 translator=_)
1223 translator=_)
1197 if not merge_possible:
1224 if not merge_possible:
1198 return merge_possible, msg
1225 return merge_possible, msg
1199
1226
1200 try:
1227 try:
1201 resp = self._try_merge(pull_request)
1228 resp = self._try_merge(pull_request)
1202 log.debug("Merge response: %s", resp)
1229 log.debug("Merge response: %s", resp)
1203 status = resp.possible, self.merge_status_message(
1230 status = resp.possible, self.merge_status_message(
1204 resp.failure_reason)
1231 resp.failure_reason)
1205 except NotImplementedError:
1232 except NotImplementedError:
1206 status = False, _('Pull request merging is not supported.')
1233 status = False, _('Pull request merging is not supported.')
1207
1234
1208 return status
1235 return status
1209
1236
1210 def _check_repo_requirements(self, target, source, translator):
1237 def _check_repo_requirements(self, target, source, translator):
1211 """
1238 """
1212 Check if `target` and `source` have compatible requirements.
1239 Check if `target` and `source` have compatible requirements.
1213
1240
1214 Currently this is just checking for largefiles.
1241 Currently this is just checking for largefiles.
1215 """
1242 """
1216 _ = translator
1243 _ = translator
1217 target_has_largefiles = self._has_largefiles(target)
1244 target_has_largefiles = self._has_largefiles(target)
1218 source_has_largefiles = self._has_largefiles(source)
1245 source_has_largefiles = self._has_largefiles(source)
1219 merge_possible = True
1246 merge_possible = True
1220 message = u''
1247 message = u''
1221
1248
1222 if target_has_largefiles != source_has_largefiles:
1249 if target_has_largefiles != source_has_largefiles:
1223 merge_possible = False
1250 merge_possible = False
1224 if source_has_largefiles:
1251 if source_has_largefiles:
1225 message = _(
1252 message = _(
1226 'Target repository large files support is disabled.')
1253 'Target repository large files support is disabled.')
1227 else:
1254 else:
1228 message = _(
1255 message = _(
1229 'Source repository large files support is disabled.')
1256 'Source repository large files support is disabled.')
1230
1257
1231 return merge_possible, message
1258 return merge_possible, message
1232
1259
1233 def _has_largefiles(self, repo):
1260 def _has_largefiles(self, repo):
1234 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1261 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1235 'extensions', 'largefiles')
1262 'extensions', 'largefiles')
1236 return largefiles_ui and largefiles_ui[0].active
1263 return largefiles_ui and largefiles_ui[0].active
1237
1264
1238 def _try_merge(self, pull_request):
1265 def _try_merge(self, pull_request):
1239 """
1266 """
1240 Try to merge the pull request and return the merge status.
1267 Try to merge the pull request and return the merge status.
1241 """
1268 """
1242 log.debug(
1269 log.debug(
1243 "Trying out if the pull request %s can be merged.",
1270 "Trying out if the pull request %s can be merged.",
1244 pull_request.pull_request_id)
1271 pull_request.pull_request_id)
1245 target_vcs = pull_request.target_repo.scm_instance()
1272 target_vcs = pull_request.target_repo.scm_instance()
1246
1273
1247 # Refresh the target reference.
1274 # Refresh the target reference.
1248 try:
1275 try:
1249 target_ref = self._refresh_reference(
1276 target_ref = self._refresh_reference(
1250 pull_request.target_ref_parts, target_vcs)
1277 pull_request.target_ref_parts, target_vcs)
1251 except CommitDoesNotExistError:
1278 except CommitDoesNotExistError:
1252 merge_state = MergeResponse(
1279 merge_state = MergeResponse(
1253 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1280 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1254 return merge_state
1281 return merge_state
1255
1282
1256 target_locked = pull_request.target_repo.locked
1283 target_locked = pull_request.target_repo.locked
1257 if target_locked and target_locked[0]:
1284 if target_locked and target_locked[0]:
1258 log.debug("The target repository is locked.")
1285 log.debug("The target repository is locked.")
1259 merge_state = MergeResponse(
1286 merge_state = MergeResponse(
1260 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1287 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1261 elif self._needs_merge_state_refresh(pull_request, target_ref):
1288 elif self._needs_merge_state_refresh(pull_request, target_ref):
1262 log.debug("Refreshing the merge status of the repository.")
1289 log.debug("Refreshing the merge status of the repository.")
1263 merge_state = self._refresh_merge_state(
1290 merge_state = self._refresh_merge_state(
1264 pull_request, target_vcs, target_ref)
1291 pull_request, target_vcs, target_ref)
1265 else:
1292 else:
1266 possible = pull_request.\
1293 possible = pull_request.\
1267 last_merge_status == MergeFailureReason.NONE
1294 last_merge_status == MergeFailureReason.NONE
1268 merge_state = MergeResponse(
1295 merge_state = MergeResponse(
1269 possible, False, None, pull_request.last_merge_status)
1296 possible, False, None, pull_request.last_merge_status)
1270
1297
1271 return merge_state
1298 return merge_state
1272
1299
1273 def _refresh_reference(self, reference, vcs_repository):
1300 def _refresh_reference(self, reference, vcs_repository):
1274 if reference.type in ('branch', 'book'):
1301 if reference.type in ('branch', 'book'):
1275 name_or_id = reference.name
1302 name_or_id = reference.name
1276 else:
1303 else:
1277 name_or_id = reference.commit_id
1304 name_or_id = reference.commit_id
1278 refreshed_commit = vcs_repository.get_commit(name_or_id)
1305 refreshed_commit = vcs_repository.get_commit(name_or_id)
1279 refreshed_reference = Reference(
1306 refreshed_reference = Reference(
1280 reference.type, reference.name, refreshed_commit.raw_id)
1307 reference.type, reference.name, refreshed_commit.raw_id)
1281 return refreshed_reference
1308 return refreshed_reference
1282
1309
1283 def _needs_merge_state_refresh(self, pull_request, target_reference):
1310 def _needs_merge_state_refresh(self, pull_request, target_reference):
1284 return not(
1311 return not(
1285 pull_request.revisions and
1312 pull_request.revisions and
1286 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1313 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1287 target_reference.commit_id == pull_request._last_merge_target_rev)
1314 target_reference.commit_id == pull_request._last_merge_target_rev)
1288
1315
1289 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1316 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1290 workspace_id = self._workspace_id(pull_request)
1317 workspace_id = self._workspace_id(pull_request)
1291 source_vcs = pull_request.source_repo.scm_instance()
1318 source_vcs = pull_request.source_repo.scm_instance()
1292 use_rebase = self._use_rebase_for_merging(pull_request)
1319 use_rebase = self._use_rebase_for_merging(pull_request)
1293 close_branch = self._close_branch_before_merging(pull_request)
1320 close_branch = self._close_branch_before_merging(pull_request)
1294 merge_state = target_vcs.merge(
1321 merge_state = target_vcs.merge(
1295 target_reference, source_vcs, pull_request.source_ref_parts,
1322 target_reference, source_vcs, pull_request.source_ref_parts,
1296 workspace_id, dry_run=True, use_rebase=use_rebase,
1323 workspace_id, dry_run=True, use_rebase=use_rebase,
1297 close_branch=close_branch)
1324 close_branch=close_branch)
1298
1325
1299 # Do not store the response if there was an unknown error.
1326 # Do not store the response if there was an unknown error.
1300 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1327 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1301 pull_request._last_merge_source_rev = \
1328 pull_request._last_merge_source_rev = \
1302 pull_request.source_ref_parts.commit_id
1329 pull_request.source_ref_parts.commit_id
1303 pull_request._last_merge_target_rev = target_reference.commit_id
1330 pull_request._last_merge_target_rev = target_reference.commit_id
1304 pull_request.last_merge_status = merge_state.failure_reason
1331 pull_request.last_merge_status = merge_state.failure_reason
1305 pull_request.shadow_merge_ref = merge_state.merge_ref
1332 pull_request.shadow_merge_ref = merge_state.merge_ref
1306 Session().add(pull_request)
1333 Session().add(pull_request)
1307 Session().commit()
1334 Session().commit()
1308
1335
1309 return merge_state
1336 return merge_state
1310
1337
1311 def _workspace_id(self, pull_request):
1338 def _workspace_id(self, pull_request):
1312 workspace_id = 'pr-%s' % pull_request.pull_request_id
1339 workspace_id = 'pr-%s' % pull_request.pull_request_id
1313 return workspace_id
1340 return workspace_id
1314
1341
1315 def merge_status_message(self, status_code):
1342 def merge_status_message(self, status_code):
1316 """
1343 """
1317 Return a human friendly error message for the given merge status code.
1344 Return a human friendly error message for the given merge status code.
1318 """
1345 """
1319 return self.MERGE_STATUS_MESSAGES[status_code]
1346 return self.MERGE_STATUS_MESSAGES[status_code]
1320
1347
1321 def generate_repo_data(self, repo, commit_id=None, branch=None,
1348 def generate_repo_data(self, repo, commit_id=None, branch=None,
1322 bookmark=None, translator=None):
1349 bookmark=None, translator=None):
1323 from rhodecode.model.repo import RepoModel
1350 from rhodecode.model.repo import RepoModel
1324
1351
1325 all_refs, selected_ref = \
1352 all_refs, selected_ref = \
1326 self._get_repo_pullrequest_sources(
1353 self._get_repo_pullrequest_sources(
1327 repo.scm_instance(), commit_id=commit_id,
1354 repo.scm_instance(), commit_id=commit_id,
1328 branch=branch, bookmark=bookmark, translator=translator)
1355 branch=branch, bookmark=bookmark, translator=translator)
1329
1356
1330 refs_select2 = []
1357 refs_select2 = []
1331 for element in all_refs:
1358 for element in all_refs:
1332 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1359 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1333 refs_select2.append({'text': element[1], 'children': children})
1360 refs_select2.append({'text': element[1], 'children': children})
1334
1361
1335 return {
1362 return {
1336 'user': {
1363 'user': {
1337 'user_id': repo.user.user_id,
1364 'user_id': repo.user.user_id,
1338 'username': repo.user.username,
1365 'username': repo.user.username,
1339 'firstname': repo.user.first_name,
1366 'firstname': repo.user.first_name,
1340 'lastname': repo.user.last_name,
1367 'lastname': repo.user.last_name,
1341 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1368 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1342 },
1369 },
1343 'name': repo.repo_name,
1370 'name': repo.repo_name,
1344 'link': RepoModel().get_url(repo),
1371 'link': RepoModel().get_url(repo),
1345 'description': h.chop_at_smart(repo.description_safe, '\n'),
1372 'description': h.chop_at_smart(repo.description_safe, '\n'),
1346 'refs': {
1373 'refs': {
1347 'all_refs': all_refs,
1374 'all_refs': all_refs,
1348 'selected_ref': selected_ref,
1375 'selected_ref': selected_ref,
1349 'select2_refs': refs_select2
1376 'select2_refs': refs_select2
1350 }
1377 }
1351 }
1378 }
1352
1379
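# Rough shape of the ``refs`` part built above, with hypothetical ref names
# and commit hashes; the 'id' values follow the 'type:name:commit_id' keys
# produced by _get_repo_pullrequest_sources():
#
#   'select2_refs': [
#       {'text': 'Branches',
#        'children': [{'id': 'branch:default:deadbeef...', 'text': 'default'}]},
#   ]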
1353 def generate_pullrequest_title(self, source, source_ref, target):
1380 def generate_pullrequest_title(self, source, source_ref, target):
1354 return u'{source}#{at_ref} to {target}'.format(
1381 return u'{source}#{at_ref} to {target}'.format(
1355 source=source,
1382 source=source,
1356 at_ref=source_ref,
1383 at_ref=source_ref,
1357 target=target,
1384 target=target,
1358 )
1385 )
1359
1386
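# e.g. (names are hypothetical):
#   generate_pullrequest_title('my-fork', 'feature-x', 'upstream')
#   -> u'my-fork#feature-x to upstream'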
1360 def _cleanup_merge_workspace(self, pull_request):
1387 def _cleanup_merge_workspace(self, pull_request):
1361 # Merging related cleanup
1388 # Merging related cleanup
1362 target_scm = pull_request.target_repo.scm_instance()
1389 target_scm = pull_request.target_repo.scm_instance()
1363 workspace_id = 'pr-%s' % pull_request.pull_request_id
1390 workspace_id = 'pr-%s' % pull_request.pull_request_id
1364
1391
1365 try:
1392 try:
1366 target_scm.cleanup_merge_workspace(workspace_id)
1393 target_scm.cleanup_merge_workspace(workspace_id)
1367 except NotImplementedError:
1394 except NotImplementedError:
1368 pass
1395 pass
1369
1396
1370 def _get_repo_pullrequest_sources(
1397 def _get_repo_pullrequest_sources(
1371 self, repo, commit_id=None, branch=None, bookmark=None,
1398 self, repo, commit_id=None, branch=None, bookmark=None,
1372 translator=None):
1399 translator=None):
1373 """
1400 """
1374 Return a structure with the repo's interesting commits, suitable for
1401 Return a structure with the repo's interesting commits, suitable for
1375 the selectors in the pullrequest controller
1402 the selectors in the pullrequest controller
1376
1403
1377 :param commit_id: a commit that must be in the list somehow
1404 :param commit_id: a commit that must be in the list somehow
1378 and selected by default
1405 and selected by default
1379 :param branch: a branch that must be in the list and selected
1406 :param branch: a branch that must be in the list and selected
1380 by default - even if closed
1407 by default - even if closed
1381 :param bookmark: a bookmark that must be in the list and selected
1408 :param bookmark: a bookmark that must be in the list and selected
1382 """
1409 """
1383 _ = translator or get_current_request().translate
1410 _ = translator or get_current_request().translate
1384
1411
1385 commit_id = safe_str(commit_id) if commit_id else None
1412 commit_id = safe_str(commit_id) if commit_id else None
1386 branch = safe_str(branch) if branch else None
1413 branch = safe_str(branch) if branch else None
1387 bookmark = safe_str(bookmark) if bookmark else None
1414 bookmark = safe_str(bookmark) if bookmark else None
1388
1415
1389 selected = None
1416 selected = None
1390
1417
1391 # order matters: first source that has commit_id in it will be selected
1418 # order matters: first source that has commit_id in it will be selected
1392 sources = []
1419 sources = []
1393 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1420 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1394 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1421 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1395
1422
1396 if commit_id:
1423 if commit_id:
1397 ref_commit = (h.short_id(commit_id), commit_id)
1424 ref_commit = (h.short_id(commit_id), commit_id)
1398 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1425 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1399
1426
1400 sources.append(
1427 sources.append(
1401 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1428 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1402 )
1429 )
1403
1430
1404 groups = []
1431 groups = []
1405 for group_key, ref_list, group_name, match in sources:
1432 for group_key, ref_list, group_name, match in sources:
1406 group_refs = []
1433 group_refs = []
1407 for ref_name, ref_id in ref_list:
1434 for ref_name, ref_id in ref_list:
1408 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1435 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1409 group_refs.append((ref_key, ref_name))
1436 group_refs.append((ref_key, ref_name))
1410
1437
1411 if not selected:
1438 if not selected:
1412 if set([commit_id, match]) & set([ref_id, ref_name]):
1439 if set([commit_id, match]) & set([ref_id, ref_name]):
1413 selected = ref_key
1440 selected = ref_key
1414
1441
1415 if group_refs:
1442 if group_refs:
1416 groups.append((group_refs, group_name))
1443 groups.append((group_refs, group_name))
1417
1444
1418 if not selected:
1445 if not selected:
1419 ref = commit_id or branch or bookmark
1446 ref = commit_id or branch or bookmark
1420 if ref:
1447 if ref:
1421 raise CommitDoesNotExistError(
1448 raise CommitDoesNotExistError(
1422 'No commit refs could be found matching: %s' % ref)
1449 'No commit refs could be found matching: %s' % ref)
1423 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1450 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1424 selected = 'branch:%s:%s' % (
1451 selected = 'branch:%s:%s' % (
1425 repo.DEFAULT_BRANCH_NAME,
1452 repo.DEFAULT_BRANCH_NAME,
1426 repo.branches[repo.DEFAULT_BRANCH_NAME]
1453 repo.branches[repo.DEFAULT_BRANCH_NAME]
1427 )
1454 )
1428 elif repo.commit_ids:
1455 elif repo.commit_ids:
1429 # make the user select in this case
1456 # make the user select in this case
1430 selected = None
1457 selected = None
1431 else:
1458 else:
1432 raise EmptyRepositoryError()
1459 raise EmptyRepositoryError()
1433 return groups, selected
1460 return groups, selected
1434
1461
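A minimal sketch of how the (groups, selected) structure returned above could be consumed to build a ref selector; the `vcs_repo` argument and the `rhodecode.model.pull_request` import path are assumptions, not part of this changeset.

# Hedged sketch: iterate the grouped refs produced by
# _get_repo_pullrequest_sources(); purely illustrative.
from rhodecode.model.pull_request import PullRequestModel  # assumed path

def print_ref_selector(vcs_repo, branch=None):
    groups, selected = PullRequestModel()._get_repo_pullrequest_sources(
        vcs_repo, branch=branch)
    # `groups` is a list of (group_refs, group_name); each group_ref is a
    # ('<type>:<name>:<id>', '<name>') tuple, e.g.
    # ('branch:default:abc123', 'default'). `selected` is one such key,
    # or None when the user must pick a ref explicitly.
    for group_refs, group_name in groups:
        for ref_key, ref_name in group_refs:
            marker = '*' if ref_key == selected else ' '
            print('%s %s: %s' % (marker, group_name, ref_key))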
1435 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1462 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1436 return self._get_diff_from_pr_or_version(
1463 return self._get_diff_from_pr_or_version(
1437 source_repo, source_ref_id, target_ref_id, context=context)
1464 source_repo, source_ref_id, target_ref_id, context=context)
1438
1465
1439 def _get_diff_from_pr_or_version(
1466 def _get_diff_from_pr_or_version(
1440 self, source_repo, source_ref_id, target_ref_id, context):
1467 self, source_repo, source_ref_id, target_ref_id, context):
1441 target_commit = source_repo.get_commit(
1468 target_commit = source_repo.get_commit(
1442 commit_id=safe_str(target_ref_id))
1469 commit_id=safe_str(target_ref_id))
1443 source_commit = source_repo.get_commit(
1470 source_commit = source_repo.get_commit(
1444 commit_id=safe_str(source_ref_id))
1471 commit_id=safe_str(source_ref_id))
1445 if isinstance(source_repo, Repository):
1472 if isinstance(source_repo, Repository):
1446 vcs_repo = source_repo.scm_instance()
1473 vcs_repo = source_repo.scm_instance()
1447 else:
1474 else:
1448 vcs_repo = source_repo
1475 vcs_repo = source_repo
1449
1476
1450 # TODO: johbo: In the context of an update, we cannot reach
1477 # TODO: johbo: In the context of an update, we cannot reach
1451 # the old commit anymore with our normal mechanisms. It needs
1478 # the old commit anymore with our normal mechanisms. It needs
1452 # some sort of special support in the vcs layer to avoid this
1479 # some sort of special support in the vcs layer to avoid this
1453 # workaround.
1480 # workaround.
1454 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1481 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1455 vcs_repo.alias == 'git'):
1482 vcs_repo.alias == 'git'):
1456 source_commit.raw_id = safe_str(source_ref_id)
1483 source_commit.raw_id = safe_str(source_ref_id)
1457
1484
1458 log.debug('calculating diff between '
1485 log.debug('calculating diff between '
1459 'source_ref:%s and target_ref:%s for repo `%s`',
1486 'source_ref:%s and target_ref:%s for repo `%s`',
1460 target_ref_id, source_ref_id,
1487 target_ref_id, source_ref_id,
1461 safe_unicode(vcs_repo.path))
1488 safe_unicode(vcs_repo.path))
1462
1489
1463 vcs_diff = vcs_repo.get_diff(
1490 vcs_diff = vcs_repo.get_diff(
1464 commit1=target_commit, commit2=source_commit, context=context)
1491 commit1=target_commit, commit2=source_commit, context=context)
1465 return vcs_diff
1492 return vcs_diff
1466
1493
1467 def _is_merge_enabled(self, pull_request):
1494 def _is_merge_enabled(self, pull_request):
1468 return self._get_general_setting(
1495 return self._get_general_setting(
1469 pull_request, 'rhodecode_pr_merge_enabled')
1496 pull_request, 'rhodecode_pr_merge_enabled')
1470
1497
1471 def _use_rebase_for_merging(self, pull_request):
1498 def _use_rebase_for_merging(self, pull_request):
1472 repo_type = pull_request.target_repo.repo_type
1499 repo_type = pull_request.target_repo.repo_type
1473 if repo_type == 'hg':
1500 if repo_type == 'hg':
1474 return self._get_general_setting(
1501 return self._get_general_setting(
1475 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1502 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1476 elif repo_type == 'git':
1503 elif repo_type == 'git':
1477 return self._get_general_setting(
1504 return self._get_general_setting(
1478 pull_request, 'rhodecode_git_use_rebase_for_merging')
1505 pull_request, 'rhodecode_git_use_rebase_for_merging')
1479
1506
1480 return False
1507 return False
1481
1508
1482 def _close_branch_before_merging(self, pull_request):
1509 def _close_branch_before_merging(self, pull_request):
1483 repo_type = pull_request.target_repo.repo_type
1510 repo_type = pull_request.target_repo.repo_type
1484 if repo_type == 'hg':
1511 if repo_type == 'hg':
1485 return self._get_general_setting(
1512 return self._get_general_setting(
1486 pull_request, 'rhodecode_hg_close_branch_before_merging')
1513 pull_request, 'rhodecode_hg_close_branch_before_merging')
1487 elif repo_type == 'git':
1514 elif repo_type == 'git':
1488 return self._get_general_setting(
1515 return self._get_general_setting(
1489 pull_request, 'rhodecode_git_close_branch_before_merging')
1516 pull_request, 'rhodecode_git_close_branch_before_merging')
1490
1517
1491 return False
1518 return False
1492
1519
1493 def _get_general_setting(self, pull_request, settings_key, default=False):
1520 def _get_general_setting(self, pull_request, settings_key, default=False):
1494 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1521 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1495 settings = settings_model.get_general_settings()
1522 settings = settings_model.get_general_settings()
1496 return settings.get(settings_key, default)
1523 return settings.get(settings_key, default)
1497
1524
1498 def _log_audit_action(self, action, action_data, user, pull_request):
1525 def _log_audit_action(self, action, action_data, user, pull_request):
1499 audit_logger.store(
1526 audit_logger.store(
1500 action=action,
1527 action=action,
1501 action_data=action_data,
1528 action_data=action_data,
1502 user=user,
1529 user=user,
1503 repo=pull_request.target_repo)
1530 repo=pull_request.target_repo)
1504
1531
1505 def get_reviewer_functions(self):
1532 def get_reviewer_functions(self):
1506 """
1533 """
1507 Fetches functions for validation and fetching default reviewers.
1534 Fetches functions for validation and fetching default reviewers.
1508 If available we use the EE package, else we fallback to CE
1535 If available we use the EE package, else we fallback to CE
1509 package functions
1536 package functions
1510 """
1537 """
1511 try:
1538 try:
1512 from rc_reviewers.utils import get_default_reviewers_data
1539 from rc_reviewers.utils import get_default_reviewers_data
1513 from rc_reviewers.utils import validate_default_reviewers
1540 from rc_reviewers.utils import validate_default_reviewers
1514 except ImportError:
1541 except ImportError:
1515 from rhodecode.apps.repository.utils import \
1542 from rhodecode.apps.repository.utils import \
1516 get_default_reviewers_data
1543 get_default_reviewers_data
1517 from rhodecode.apps.repository.utils import \
1544 from rhodecode.apps.repository.utils import \
1518 validate_default_reviewers
1545 validate_default_reviewers
1519
1546
1520 return get_default_reviewers_data, validate_default_reviewers
1547 return get_default_reviewers_data, validate_default_reviewers
1521
1548
1522
1549
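A short usage sketch for the EE/CE fallback above; only the unpacking is grounded in this diff, and the PullRequestModel import path is an assumption.

# Hedged sketch: resolve whichever reviewer helpers are installed.
from rhodecode.model.pull_request import PullRequestModel  # assumed path

get_default_reviewers_data, validate_default_reviewers = \
    PullRequestModel().get_reviewer_functions()
# When the rc_reviewers (EE) package is importable its helpers are used,
# otherwise the rhodecode.apps.repository.utils (CE) fallbacks are returned.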
1523 class MergeCheck(object):
1550 class MergeCheck(object):
1524 """
1551 """
1525 Performs merge checks and returns a check object which stores information
1552 Performs merge checks and returns a check object which stores information
1526 about merge errors and merge conditions.
1553 about merge errors and merge conditions.
1527 """
1554 """
1528 TODO_CHECK = 'todo'
1555 TODO_CHECK = 'todo'
1529 PERM_CHECK = 'perm'
1556 PERM_CHECK = 'perm'
1530 REVIEW_CHECK = 'review'
1557 REVIEW_CHECK = 'review'
1531 MERGE_CHECK = 'merge'
1558 MERGE_CHECK = 'merge'
1532
1559
1533 def __init__(self):
1560 def __init__(self):
1534 self.review_status = None
1561 self.review_status = None
1535 self.merge_possible = None
1562 self.merge_possible = None
1536 self.merge_msg = ''
1563 self.merge_msg = ''
1537 self.failed = None
1564 self.failed = None
1538 self.errors = []
1565 self.errors = []
1539 self.error_details = OrderedDict()
1566 self.error_details = OrderedDict()
1540
1567
1541 def push_error(self, error_type, message, error_key, details):
1568 def push_error(self, error_type, message, error_key, details):
1542 self.failed = True
1569 self.failed = True
1543 self.errors.append([error_type, message])
1570 self.errors.append([error_type, message])
1544 self.error_details[error_key] = dict(
1571 self.error_details[error_key] = dict(
1545 details=details,
1572 details=details,
1546 error_type=error_type,
1573 error_type=error_type,
1547 message=message
1574 message=message
1548 )
1575 )
1549
1576
1550 @classmethod
1577 @classmethod
1551 def validate(cls, pull_request, user, translator, fail_early=False):
1578 def validate(cls, pull_request, user, translator, fail_early=False):
1552 _ = translator
1579 _ = translator
1553 merge_check = cls()
1580 merge_check = cls()
1554
1581
1555 # permissions to merge
1582 # permissions to merge
1556 user_allowed_to_merge = PullRequestModel().check_user_merge(
1583 user_allowed_to_merge = PullRequestModel().check_user_merge(
1557 pull_request, user)
1584 pull_request, user)
1558 if not user_allowed_to_merge:
1585 if not user_allowed_to_merge:
1559 log.debug("MergeCheck: cannot merge, user lacks merge permission.")
1586 log.debug("MergeCheck: cannot merge, user lacks merge permission.")
1560
1587
1561 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1588 msg = _('User `{}` not allowed to perform merge.').format(user.username)
1562 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1589 merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
1563 if fail_early:
1590 if fail_early:
1564 return merge_check
1591 return merge_check
1565
1592
1566 # review status, must be always present
1593 # review status, must be always present
1567 review_status = pull_request.calculated_review_status()
1594 review_status = pull_request.calculated_review_status()
1568 merge_check.review_status = review_status
1595 merge_check.review_status = review_status
1569
1596
1570 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1597 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1571 if not status_approved:
1598 if not status_approved:
1572 log.debug("MergeCheck: cannot merge, approval is pending.")
1599 log.debug("MergeCheck: cannot merge, approval is pending.")
1573
1600
1574 msg = _('Pull request reviewer approval is pending.')
1601 msg = _('Pull request reviewer approval is pending.')
1575
1602
1576 merge_check.push_error(
1603 merge_check.push_error(
1577 'warning', msg, cls.REVIEW_CHECK, review_status)
1604 'warning', msg, cls.REVIEW_CHECK, review_status)
1578
1605
1579 if fail_early:
1606 if fail_early:
1580 return merge_check
1607 return merge_check
1581
1608
1582 # left over TODOs
1609 # left over TODOs
1583 todos = CommentsModel().get_unresolved_todos(pull_request)
1610 todos = CommentsModel().get_unresolved_todos(pull_request)
1584 if todos:
1611 if todos:
1585 log.debug("MergeCheck: cannot merge, {} "
1612 log.debug("MergeCheck: cannot merge, {} "
1586 "unresolved todos left.".format(len(todos)))
1613 "unresolved todos left.".format(len(todos)))
1587
1614
1588 if len(todos) == 1:
1615 if len(todos) == 1:
1589 msg = _('Cannot merge, {} TODO still not resolved.').format(
1616 msg = _('Cannot merge, {} TODO still not resolved.').format(
1590 len(todos))
1617 len(todos))
1591 else:
1618 else:
1592 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1619 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1593 len(todos))
1620 len(todos))
1594
1621
1595 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1622 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1596
1623
1597 if fail_early:
1624 if fail_early:
1598 return merge_check
1625 return merge_check
1599
1626
1600 # merge possible
1627 # merge possible
1601 merge_status, msg = PullRequestModel().merge_status(
1628 merge_status, msg = PullRequestModel().merge_status(
1602 pull_request, translator=translator)
1629 pull_request, translator=translator)
1603 merge_check.merge_possible = merge_status
1630 merge_check.merge_possible = merge_status
1604 merge_check.merge_msg = msg
1631 merge_check.merge_msg = msg
1605 if not merge_status:
1632 if not merge_status:
1606 log.debug(
1633 log.debug(
1607 "MergeCheck: cannot merge, pull request merge not possible.")
1634 "MergeCheck: cannot merge, pull request merge not possible.")
1608 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1635 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1609
1636
1610 if fail_early:
1637 if fail_early:
1611 return merge_check
1638 return merge_check
1612
1639
1613 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1640 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1614 return merge_check
1641 return merge_check
1615
1642
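An illustrative caller for MergeCheck.validate(); the pull_request, user and request objects are assumed inputs, and the translator is taken from a Pyramid request as done elsewhere in this file.

# Hedged sketch: run all merge checks and inspect the accumulated errors.
def can_merge(pull_request, user, request):
    merge_check = MergeCheck.validate(
        pull_request, user, translator=request.translate, fail_early=False)
    if merge_check.failed:
        # errors holds [error_type, message] pairs pushed via push_error();
        # error_details is keyed by PERM_CHECK / REVIEW_CHECK / TODO_CHECK /
        # MERGE_CHECK.
        for error_type, message in merge_check.errors:
            log.debug('merge blocked (%s): %s', error_type, message)
        return False
    return bool(merge_check.merge_possible)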
1616 @classmethod
1643 @classmethod
1617 def get_merge_conditions(cls, pull_request, translator):
1644 def get_merge_conditions(cls, pull_request, translator):
1618 _ = translator
1645 _ = translator
1619 merge_details = {}
1646 merge_details = {}
1620
1647
1621 model = PullRequestModel()
1648 model = PullRequestModel()
1622 use_rebase = model._use_rebase_for_merging(pull_request)
1649 use_rebase = model._use_rebase_for_merging(pull_request)
1623
1650
1624 if use_rebase:
1651 if use_rebase:
1625 merge_details['merge_strategy'] = dict(
1652 merge_details['merge_strategy'] = dict(
1626 details={},
1653 details={},
1627 message=_('Merge strategy: rebase')
1654 message=_('Merge strategy: rebase')
1628 )
1655 )
1629 else:
1656 else:
1630 merge_details['merge_strategy'] = dict(
1657 merge_details['merge_strategy'] = dict(
1631 details={},
1658 details={},
1632 message=_('Merge strategy: explicit merge commit')
1659 message=_('Merge strategy: explicit merge commit')
1633 )
1660 )
1634
1661
1635 close_branch = model._close_branch_before_merging(pull_request)
1662 close_branch = model._close_branch_before_merging(pull_request)
1636 if close_branch:
1663 if close_branch:
1637 repo_type = pull_request.target_repo.repo_type
1664 repo_type = pull_request.target_repo.repo_type
1638 if repo_type == 'hg':
1665 if repo_type == 'hg':
1639 close_msg = _('Source branch will be closed after merge.')
1666 close_msg = _('Source branch will be closed after merge.')
1640 elif repo_type == 'git':
1667 elif repo_type == 'git':
1641 close_msg = _('Source branch will be deleted after merge.')
1668 close_msg = _('Source branch will be deleted after merge.')
1642
1669
1643 merge_details['close_branch'] = dict(
1670 merge_details['close_branch'] = dict(
1644 details={},
1671 details={},
1645 message=close_msg
1672 message=close_msg
1646 )
1673 )
1647
1674
1648 return merge_details
1675 return merge_details
1649
1676
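For reference, a hedged example of the merge_details mapping built above; which keys appear and the exact messages depend on the target repository's VCS settings.

# Illustrative shape only -- values mirror the messages used above.
example_merge_details = {
    'merge_strategy': dict(
        details={}, message=u'Merge strategy: explicit merge commit'),
    'close_branch': dict(
        details={}, message=u'Source branch will be closed after merge.'),
}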
1650 ChangeTuple = collections.namedtuple(
1677 ChangeTuple = collections.namedtuple(
1651 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1678 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1652
1679
1653 FileChangeTuple = collections.namedtuple(
1680 FileChangeTuple = collections.namedtuple(
1654 'FileChangeTuple', ['added', 'modified', 'removed'])
1681 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,913 +1,914 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 users model for RhodeCode
22 users model for RhodeCode
23 """
23 """
24
24
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import datetime
27 import datetime
28 import ipaddress
28 import ipaddress
29
29
30 from pyramid.threadlocal import get_current_request
30 from pyramid.threadlocal import get_current_request
31 from sqlalchemy.exc import DatabaseError
31 from sqlalchemy.exc import DatabaseError
32
32
33 from rhodecode import events
33 from rhodecode import events
34 from rhodecode.lib.user_log_filter import user_log_filter
34 from rhodecode.lib.user_log_filter import user_log_filter
35 from rhodecode.lib.utils2 import (
35 from rhodecode.lib.utils2 import (
36 safe_unicode, get_current_rhodecode_user, action_logger_generic,
36 safe_unicode, get_current_rhodecode_user, action_logger_generic,
37 AttributeDict, str2bool)
37 AttributeDict, str2bool)
38 from rhodecode.lib.exceptions import (
38 from rhodecode.lib.exceptions import (
39 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
39 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
40 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
40 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
41 from rhodecode.lib.caching_query import FromCache
41 from rhodecode.lib.caching_query import FromCache
42 from rhodecode.model import BaseModel
42 from rhodecode.model import BaseModel
43 from rhodecode.model.auth_token import AuthTokenModel
43 from rhodecode.model.auth_token import AuthTokenModel
44 from rhodecode.model.db import (
44 from rhodecode.model.db import (
45 _hash_key, true, false, or_, joinedload, User, UserToPerm,
45 _hash_key, true, false, or_, joinedload, User, UserToPerm,
46 UserEmailMap, UserIpMap, UserLog)
46 UserEmailMap, UserIpMap, UserLog)
47 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
48 from rhodecode.model.repo_group import RepoGroupModel
48 from rhodecode.model.repo_group import RepoGroupModel
49
49
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class UserModel(BaseModel):
54 class UserModel(BaseModel):
55 cls = User
55 cls = User
56
56
57 def get(self, user_id, cache=False):
57 def get(self, user_id, cache=False):
58 user = self.sa.query(User)
58 user = self.sa.query(User)
59 if cache:
59 if cache:
60 user = user.options(
60 user = user.options(
61 FromCache("sql_cache_short", "get_user_%s" % user_id))
61 FromCache("sql_cache_short", "get_user_%s" % user_id))
62 return user.get(user_id)
62 return user.get(user_id)
63
63
64 def get_user(self, user):
64 def get_user(self, user):
65 return self._get_user(user)
65 return self._get_user(user)
66
66
67 def _serialize_user(self, user):
67 def _serialize_user(self, user):
68 import rhodecode.lib.helpers as h
68 import rhodecode.lib.helpers as h
69
69
70 return {
70 return {
71 'id': user.user_id,
71 'id': user.user_id,
72 'first_name': user.first_name,
72 'first_name': user.first_name,
73 'last_name': user.last_name,
73 'last_name': user.last_name,
74 'username': user.username,
74 'username': user.username,
75 'email': user.email,
75 'email': user.email,
76 'icon_link': h.gravatar_url(user.email, 30),
76 'icon_link': h.gravatar_url(user.email, 30),
77 'profile_link': h.link_to_user(user),
77 'value_display': h.escape(h.person(user)),
78 'value_display': h.escape(h.person(user)),
78 'value': user.username,
79 'value': user.username,
79 'value_type': 'user',
80 'value_type': 'user',
80 'active': user.active,
81 'active': user.active,
81 }
82 }
82
83
83 def get_users(self, name_contains=None, limit=20, only_active=True):
84 def get_users(self, name_contains=None, limit=20, only_active=True):
84
85
85 query = self.sa.query(User)
86 query = self.sa.query(User)
86 if only_active:
87 if only_active:
87 query = query.filter(User.active == true())
88 query = query.filter(User.active == true())
88
89
89 if name_contains:
90 if name_contains:
90 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
91 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
91 query = query.filter(
92 query = query.filter(
92 or_(
93 or_(
93 User.name.ilike(ilike_expression),
94 User.name.ilike(ilike_expression),
94 User.lastname.ilike(ilike_expression),
95 User.lastname.ilike(ilike_expression),
95 User.username.ilike(ilike_expression)
96 User.username.ilike(ilike_expression)
96 )
97 )
97 )
98 )
98 query = query.limit(limit)
99 query = query.limit(limit)
99 users = query.all()
100 users = query.all()
100
101
101 _users = [
102 _users = [
102 self._serialize_user(user) for user in users
103 self._serialize_user(user) for user in users
103 ]
104 ]
104 return _users
105 return _users
105
106
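A small usage sketch for the user search above, e.g. for an autocomplete endpoint; the UserModel import path is an assumption.

# Hedged sketch: fuzzy user lookup returning the serialized dicts
# produced by _serialize_user().
from rhodecode.model.user import UserModel  # assumed path

matches = UserModel().get_users(name_contains='adm', limit=10)
usernames = [entry['username'] for entry in matches]
# each entry also carries id, email, icon_link, profile_link,
# value/value_display/value_type and the active flag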
106 def get_by_username(self, username, cache=False, case_insensitive=False):
107 def get_by_username(self, username, cache=False, case_insensitive=False):
107
108
108 if case_insensitive:
109 if case_insensitive:
109 user = self.sa.query(User).filter(User.username.ilike(username))
110 user = self.sa.query(User).filter(User.username.ilike(username))
110 else:
111 else:
111 user = self.sa.query(User)\
112 user = self.sa.query(User)\
112 .filter(User.username == username)
113 .filter(User.username == username)
113 if cache:
114 if cache:
114 name_key = _hash_key(username)
115 name_key = _hash_key(username)
115 user = user.options(
116 user = user.options(
116 FromCache("sql_cache_short", "get_user_%s" % name_key))
117 FromCache("sql_cache_short", "get_user_%s" % name_key))
117 return user.scalar()
118 return user.scalar()
118
119
119 def get_by_email(self, email, cache=False, case_insensitive=False):
120 def get_by_email(self, email, cache=False, case_insensitive=False):
120 return User.get_by_email(email, case_insensitive, cache)
121 return User.get_by_email(email, case_insensitive, cache)
121
122
122 def get_by_auth_token(self, auth_token, cache=False):
123 def get_by_auth_token(self, auth_token, cache=False):
123 return User.get_by_auth_token(auth_token, cache)
124 return User.get_by_auth_token(auth_token, cache)
124
125
125 def get_active_user_count(self, cache=False):
126 def get_active_user_count(self, cache=False):
126 qry = User.query().filter(
127 qry = User.query().filter(
127 User.active == true()).filter(
128 User.active == true()).filter(
128 User.username != User.DEFAULT_USER)
129 User.username != User.DEFAULT_USER)
129 if cache:
130 if cache:
130 qry = qry.options(
131 qry = qry.options(
131 FromCache("sql_cache_short", "get_active_users"))
132 FromCache("sql_cache_short", "get_active_users"))
132 return qry.count()
133 return qry.count()
133
134
134 def create(self, form_data, cur_user=None):
135 def create(self, form_data, cur_user=None):
135 if not cur_user:
136 if not cur_user:
136 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
137 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
137
138
138 user_data = {
139 user_data = {
139 'username': form_data['username'],
140 'username': form_data['username'],
140 'password': form_data['password'],
141 'password': form_data['password'],
141 'email': form_data['email'],
142 'email': form_data['email'],
142 'firstname': form_data['firstname'],
143 'firstname': form_data['firstname'],
143 'lastname': form_data['lastname'],
144 'lastname': form_data['lastname'],
144 'active': form_data['active'],
145 'active': form_data['active'],
145 'extern_type': form_data['extern_type'],
146 'extern_type': form_data['extern_type'],
146 'extern_name': form_data['extern_name'],
147 'extern_name': form_data['extern_name'],
147 'admin': False,
148 'admin': False,
148 'cur_user': cur_user
149 'cur_user': cur_user
149 }
150 }
150
151
151 if 'create_repo_group' in form_data:
152 if 'create_repo_group' in form_data:
152 user_data['create_repo_group'] = str2bool(
153 user_data['create_repo_group'] = str2bool(
153 form_data.get('create_repo_group'))
154 form_data.get('create_repo_group'))
154
155
155 try:
156 try:
156 if form_data.get('password_change'):
157 if form_data.get('password_change'):
157 user_data['force_password_change'] = True
158 user_data['force_password_change'] = True
158 return UserModel().create_or_update(**user_data)
159 return UserModel().create_or_update(**user_data)
159 except Exception:
160 except Exception:
160 log.error(traceback.format_exc())
161 log.error(traceback.format_exc())
161 raise
162 raise
162
163
163 def update_user(self, user, skip_attrs=None, **kwargs):
164 def update_user(self, user, skip_attrs=None, **kwargs):
164 from rhodecode.lib.auth import get_crypt_password
165 from rhodecode.lib.auth import get_crypt_password
165
166
166 user = self._get_user(user)
167 user = self._get_user(user)
167 if user.username == User.DEFAULT_USER:
168 if user.username == User.DEFAULT_USER:
168 raise DefaultUserException(
169 raise DefaultUserException(
169 "You can't edit this user (`%(username)s`) since it's "
170 "You can't edit this user (`%(username)s`) since it's "
170 "crucial for entire application" % {
171 "crucial for entire application" % {
171 'username': user.username})
172 'username': user.username})
172
173
173 # first store only defaults
174 # first store only defaults
174 user_attrs = {
175 user_attrs = {
175 'updating_user_id': user.user_id,
176 'updating_user_id': user.user_id,
176 'username': user.username,
177 'username': user.username,
177 'password': user.password,
178 'password': user.password,
178 'email': user.email,
179 'email': user.email,
179 'firstname': user.name,
180 'firstname': user.name,
180 'lastname': user.lastname,
181 'lastname': user.lastname,
181 'active': user.active,
182 'active': user.active,
182 'admin': user.admin,
183 'admin': user.admin,
183 'extern_name': user.extern_name,
184 'extern_name': user.extern_name,
184 'extern_type': user.extern_type,
185 'extern_type': user.extern_type,
185 'language': user.user_data.get('language')
186 'language': user.user_data.get('language')
186 }
187 }
187
188
188 # if new_password comes from the form, use it as the
189 # if new_password comes from the form, use it as the
189 # password to store
190 # password to store
190 if kwargs.get('new_password'):
191 if kwargs.get('new_password'):
191 kwargs['password'] = kwargs['new_password']
192 kwargs['password'] = kwargs['new_password']
192
193
193 # cleanups, my_account password change form
194 # cleanups, my_account password change form
194 kwargs.pop('current_password', None)
195 kwargs.pop('current_password', None)
195 kwargs.pop('new_password', None)
196 kwargs.pop('new_password', None)
196
197
197 # cleanups, user edit password change form
198 # cleanups, user edit password change form
198 kwargs.pop('password_confirmation', None)
199 kwargs.pop('password_confirmation', None)
199 kwargs.pop('password_change', None)
200 kwargs.pop('password_change', None)
200
201
201 # create repo group on user creation
202 # create repo group on user creation
202 kwargs.pop('create_repo_group', None)
203 kwargs.pop('create_repo_group', None)
203
204
204 # legacy forms send name, which is the firstname
205 # legacy forms send name, which is the firstname
205 firstname = kwargs.pop('name', None)
206 firstname = kwargs.pop('name', None)
206 if firstname:
207 if firstname:
207 kwargs['firstname'] = firstname
208 kwargs['firstname'] = firstname
208
209
209 for k, v in kwargs.items():
210 for k, v in kwargs.items():
210 # skip if we don't want to update this
211 # skip if we don't want to update this
211 if skip_attrs and k in skip_attrs:
212 if skip_attrs and k in skip_attrs:
212 continue
213 continue
213
214
214 user_attrs[k] = v
215 user_attrs[k] = v
215
216
216 try:
217 try:
217 return self.create_or_update(**user_attrs)
218 return self.create_or_update(**user_attrs)
218 except Exception:
219 except Exception:
219 log.error(traceback.format_exc())
220 log.error(traceback.format_exc())
220 raise
221 raise
221
222
222 def create_or_update(
223 def create_or_update(
223 self, username, password, email, firstname='', lastname='',
224 self, username, password, email, firstname='', lastname='',
224 active=True, admin=False, extern_type=None, extern_name=None,
225 active=True, admin=False, extern_type=None, extern_name=None,
225 cur_user=None, plugin=None, force_password_change=False,
226 cur_user=None, plugin=None, force_password_change=False,
226 allow_to_create_user=True, create_repo_group=None,
227 allow_to_create_user=True, create_repo_group=None,
227 updating_user_id=None, language=None, strict_creation_check=True):
228 updating_user_id=None, language=None, strict_creation_check=True):
228 """
229 """
229 Creates a new instance if not found, or updates the current one
230 Creates a new instance if not found, or updates the current one
230
231
231 :param username:
232 :param username:
232 :param password:
233 :param password:
233 :param email:
234 :param email:
234 :param firstname:
235 :param firstname:
235 :param lastname:
236 :param lastname:
236 :param active:
237 :param active:
237 :param admin:
238 :param admin:
238 :param extern_type:
239 :param extern_type:
239 :param extern_name:
240 :param extern_name:
240 :param cur_user:
241 :param cur_user:
241 :param plugin: optional plugin this method was called from
242 :param plugin: optional plugin this method was called from
242 :param force_password_change: toggles new or existing user flag
243 :param force_password_change: toggles new or existing user flag
243 for password change
244 for password change
244 :param allow_to_create_user: Defines if the method can actually create
245 :param allow_to_create_user: Defines if the method can actually create
245 new users
246 new users
246 :param create_repo_group: Defines if the method should also
247 :param create_repo_group: Defines if the method should also
247 create a repo group named after the user and owned by them
248 create a repo group named after the user and owned by them
248 :param updating_user_id: if set, this is the user we want to
249 :param updating_user_id: if set, this is the user we want to
249 update; this allows editing the username.
250 update; this allows editing the username.
250 :param language: the user's interface language.
251 :param language: the user's interface language.
251
252
252 :returns: new User object with injected `is_new_user` attribute.
253 :returns: new User object with injected `is_new_user` attribute.
253 """
254 """
254
255
255 if not cur_user:
256 if not cur_user:
256 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
257 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
257
258
258 from rhodecode.lib.auth import (
259 from rhodecode.lib.auth import (
259 get_crypt_password, check_password, generate_auth_token)
260 get_crypt_password, check_password, generate_auth_token)
260 from rhodecode.lib.hooks_base import (
261 from rhodecode.lib.hooks_base import (
261 log_create_user, check_allowed_create_user)
262 log_create_user, check_allowed_create_user)
262
263
263 def _password_change(new_user, password):
264 def _password_change(new_user, password):
264 old_password = new_user.password or ''
265 old_password = new_user.password or ''
265 # empty password
266 # empty password
266 if not old_password:
267 if not old_password:
267 return False
268 return False
268
269
269 # password check is only needed for RhodeCode internal auth calls
270 # password check is only needed for RhodeCode internal auth calls
270 # in case it's a plugin we don't care
271 # in case it's a plugin we don't care
271 if not plugin:
272 if not plugin:
272
273
273 # first check if we were given the crypted password back; if it
274 # first check if we were given the crypted password back; if it
274 # matches, it's not a password change
275 # matches, it's not a password change
275 if new_user.password == password:
276 if new_user.password == password:
276 return False
277 return False
277
278
278 password_match = check_password(password, old_password)
279 password_match = check_password(password, old_password)
279 if not password_match:
280 if not password_match:
280 return True
281 return True
281
282
282 return False
283 return False
283
284
284 # read settings on default personal repo group creation
285 # read settings on default personal repo group creation
285 if create_repo_group is None:
286 if create_repo_group is None:
286 default_create_repo_group = RepoGroupModel()\
287 default_create_repo_group = RepoGroupModel()\
287 .get_default_create_personal_repo_group()
288 .get_default_create_personal_repo_group()
288 create_repo_group = default_create_repo_group
289 create_repo_group = default_create_repo_group
289
290
290 user_data = {
291 user_data = {
291 'username': username,
292 'username': username,
292 'password': password,
293 'password': password,
293 'email': email,
294 'email': email,
294 'firstname': firstname,
295 'firstname': firstname,
295 'lastname': lastname,
296 'lastname': lastname,
296 'active': active,
297 'active': active,
297 'admin': admin
298 'admin': admin
298 }
299 }
299
300
300 if updating_user_id:
301 if updating_user_id:
301 log.debug('Checking for existing account in RhodeCode '
302 log.debug('Checking for existing account in RhodeCode '
302 'database with user_id `%s` ' % (updating_user_id,))
303 'database with user_id `%s` ' % (updating_user_id,))
303 user = User.get(updating_user_id)
304 user = User.get(updating_user_id)
304 else:
305 else:
305 log.debug('Checking for existing account in RhodeCode '
306 log.debug('Checking for existing account in RhodeCode '
306 'database with username `%s` ' % (username,))
307 'database with username `%s` ' % (username,))
307 user = User.get_by_username(username, case_insensitive=True)
308 user = User.get_by_username(username, case_insensitive=True)
308
309
309 if user is None:
310 if user is None:
310 # we check internal flag if this method is actually allowed to
311 # we check internal flag if this method is actually allowed to
311 # create new user
312 # create new user
312 if not allow_to_create_user:
313 if not allow_to_create_user:
313 msg = ('Method wants to create new user, but it is not '
314 msg = ('Method wants to create new user, but it is not '
314 'allowed to do so')
315 'allowed to do so')
315 log.warning(msg)
316 log.warning(msg)
316 raise NotAllowedToCreateUserError(msg)
317 raise NotAllowedToCreateUserError(msg)
317
318
318 log.debug('Creating new user %s', username)
319 log.debug('Creating new user %s', username)
319
320
320 # only if we create a user that is active
321 # only if we create a user that is active
321 new_active_user = active
322 new_active_user = active
322 if new_active_user and strict_creation_check:
323 if new_active_user and strict_creation_check:
323 # raises UserCreationError if it's not allowed for any reason to
324 # raises UserCreationError if it's not allowed for any reason to
324 # create new active user, this also executes pre-create hooks
325 # create new active user, this also executes pre-create hooks
325 check_allowed_create_user(user_data, cur_user, strict_check=True)
326 check_allowed_create_user(user_data, cur_user, strict_check=True)
326 events.trigger(events.UserPreCreate(user_data))
327 events.trigger(events.UserPreCreate(user_data))
327 new_user = User()
328 new_user = User()
328 edit = False
329 edit = False
329 else:
330 else:
330 log.debug('updating user %s', username)
331 log.debug('updating user %s', username)
331 events.trigger(events.UserPreUpdate(user, user_data))
332 events.trigger(events.UserPreUpdate(user, user_data))
332 new_user = user
333 new_user = user
333 edit = True
334 edit = True
334
335
335 # we're not allowed to edit default user
336 # we're not allowed to edit default user
336 if user.username == User.DEFAULT_USER:
337 if user.username == User.DEFAULT_USER:
337 raise DefaultUserException(
338 raise DefaultUserException(
338 "You can't edit this user (`%(username)s`) since it's "
339 "You can't edit this user (`%(username)s`) since it's "
339 "crucial for entire application"
340 "crucial for entire application"
340 % {'username': user.username})
341 % {'username': user.username})
341
342
342 # inject special attribute that will tell us if User is new or old
343 # inject special attribute that will tell us if User is new or old
343 new_user.is_new_user = not edit
344 new_user.is_new_user = not edit
344 # for users that didn't specify an auth type, we use the RhodeCode built-in
345 # for users that didn't specify an auth type, we use the RhodeCode built-in
345 from rhodecode.authentication.plugins import auth_rhodecode
346 from rhodecode.authentication.plugins import auth_rhodecode
346 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
347 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
347 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name
348 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name
348
349
349 try:
350 try:
350 new_user.username = username
351 new_user.username = username
351 new_user.admin = admin
352 new_user.admin = admin
352 new_user.email = email
353 new_user.email = email
353 new_user.active = active
354 new_user.active = active
354 new_user.extern_name = safe_unicode(extern_name)
355 new_user.extern_name = safe_unicode(extern_name)
355 new_user.extern_type = safe_unicode(extern_type)
356 new_user.extern_type = safe_unicode(extern_type)
356 new_user.name = firstname
357 new_user.name = firstname
357 new_user.lastname = lastname
358 new_user.lastname = lastname
358
359
359 # set password only if creating a user or the password is changed
360 # set password only if creating a user or the password is changed
360 if not edit or _password_change(new_user, password):
361 if not edit or _password_change(new_user, password):
361 reason = 'new password' if edit else 'new user'
362 reason = 'new password' if edit else 'new user'
362 log.debug('Updating password reason=>%s', reason)
363 log.debug('Updating password reason=>%s', reason)
363 new_user.password = get_crypt_password(password) if password else None
364 new_user.password = get_crypt_password(password) if password else None
364
365
365 if force_password_change:
366 if force_password_change:
366 new_user.update_userdata(force_password_change=True)
367 new_user.update_userdata(force_password_change=True)
367 if language:
368 if language:
368 new_user.update_userdata(language=language)
369 new_user.update_userdata(language=language)
369 new_user.update_userdata(notification_status=True)
370 new_user.update_userdata(notification_status=True)
370
371
371 self.sa.add(new_user)
372 self.sa.add(new_user)
372
373
373 if not edit and create_repo_group:
374 if not edit and create_repo_group:
374 RepoGroupModel().create_personal_repo_group(
375 RepoGroupModel().create_personal_repo_group(
375 new_user, commit_early=False)
376 new_user, commit_early=False)
376
377
377 if not edit:
378 if not edit:
378 # add the RSS token
379 # add the RSS token
379 AuthTokenModel().create(username,
380 AuthTokenModel().create(username,
380 description=u'Generated feed token',
381 description=u'Generated feed token',
381 role=AuthTokenModel.cls.ROLE_FEED)
382 role=AuthTokenModel.cls.ROLE_FEED)
382 kwargs = new_user.get_dict()
383 kwargs = new_user.get_dict()
383 # backward compat, require api_keys present
384 # backward compat, require api_keys present
384 kwargs['api_keys'] = kwargs['auth_tokens']
385 kwargs['api_keys'] = kwargs['auth_tokens']
385 log_create_user(created_by=cur_user, **kwargs)
386 log_create_user(created_by=cur_user, **kwargs)
386 events.trigger(events.UserPostCreate(user_data))
387 events.trigger(events.UserPostCreate(user_data))
387 return new_user
388 return new_user
388 except (DatabaseError,):
389 except (DatabaseError,):
389 log.error(traceback.format_exc())
390 log.error(traceback.format_exc())
390 raise
391 raise
391
392
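A hedged example call for create_or_update(), using only parameters documented in the docstring above; the values and the import paths are illustrative assumptions.

# Hedged sketch: create (or update) an account through the model layer.
from rhodecode.model.user import UserModel  # assumed path
from rhodecode.model.meta import Session

new_user = UserModel().create_or_update(
    username='example-user',
    password='example-secret',
    email='example@example.com',
    firstname='Example',
    lastname='User',
    active=True,
    admin=False,
    force_password_change=True,  # user must pick a new password on login
    create_repo_group=False,     # skip the personal repo group
)
Session().commit()
# the returned object carries the injected `is_new_user` attribute
assert hasattr(new_user, 'is_new_user')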
392 def create_registration(self, form_data):
393 def create_registration(self, form_data):
393 from rhodecode.model.notification import NotificationModel
394 from rhodecode.model.notification import NotificationModel
394 from rhodecode.model.notification import EmailNotificationModel
395 from rhodecode.model.notification import EmailNotificationModel
395
396
396 try:
397 try:
397 form_data['admin'] = False
398 form_data['admin'] = False
398 form_data['extern_name'] = 'rhodecode'
399 form_data['extern_name'] = 'rhodecode'
399 form_data['extern_type'] = 'rhodecode'
400 form_data['extern_type'] = 'rhodecode'
400 new_user = self.create(form_data)
401 new_user = self.create(form_data)
401
402
402 self.sa.add(new_user)
403 self.sa.add(new_user)
403 self.sa.flush()
404 self.sa.flush()
404
405
405 user_data = new_user.get_dict()
406 user_data = new_user.get_dict()
406 kwargs = {
407 kwargs = {
407 # use SQLALCHEMY safe dump of user data
408 # use SQLALCHEMY safe dump of user data
408 'user': AttributeDict(user_data),
409 'user': AttributeDict(user_data),
409 'date': datetime.datetime.now()
410 'date': datetime.datetime.now()
410 }
411 }
411 notification_type = EmailNotificationModel.TYPE_REGISTRATION
412 notification_type = EmailNotificationModel.TYPE_REGISTRATION
412 # pre-generate the subject for notification itself
413 # pre-generate the subject for notification itself
413 (subject,
414 (subject,
414 _h, _e, # we don't care about those
415 _h, _e, # we don't care about those
415 body_plaintext) = EmailNotificationModel().render_email(
416 body_plaintext) = EmailNotificationModel().render_email(
416 notification_type, **kwargs)
417 notification_type, **kwargs)
417
418
418 # create notification objects, and emails
419 # create notification objects, and emails
419 NotificationModel().create(
420 NotificationModel().create(
420 created_by=new_user,
421 created_by=new_user,
421 notification_subject=subject,
422 notification_subject=subject,
422 notification_body=body_plaintext,
423 notification_body=body_plaintext,
423 notification_type=notification_type,
424 notification_type=notification_type,
424 recipients=None, # all admins
425 recipients=None, # all admins
425 email_kwargs=kwargs,
426 email_kwargs=kwargs,
426 )
427 )
427
428
428 return new_user
429 return new_user
429 except Exception:
430 except Exception:
430 log.error(traceback.format_exc())
431 log.error(traceback.format_exc())
431 raise
432 raise
432
433
433 def _handle_user_repos(self, username, repositories, handle_mode=None):
434 def _handle_user_repos(self, username, repositories, handle_mode=None):
434 _superadmin = self.cls.get_first_super_admin()
435 _superadmin = self.cls.get_first_super_admin()
435 left_overs = True
436 left_overs = True
436
437
437 from rhodecode.model.repo import RepoModel
438 from rhodecode.model.repo import RepoModel
438
439
439 if handle_mode == 'detach':
440 if handle_mode == 'detach':
440 for obj in repositories:
441 for obj in repositories:
441 obj.user = _superadmin
442 obj.user = _superadmin
442 # set a description so we know why the super admin now owns
443 # set a description so we know why the super admin now owns
443 # additional repositories that were orphaned !
444 # additional repositories that were orphaned !
444 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
445 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
445 self.sa.add(obj)
446 self.sa.add(obj)
446 left_overs = False
447 left_overs = False
447 elif handle_mode == 'delete':
448 elif handle_mode == 'delete':
448 for obj in repositories:
449 for obj in repositories:
449 RepoModel().delete(obj, forks='detach')
450 RepoModel().delete(obj, forks='detach')
450 left_overs = False
451 left_overs = False
451
452
452 # if nothing was done we still have leftovers
453 # if nothing was done we still have leftovers
453 return left_overs
454 return left_overs
454
455
455 def _handle_user_repo_groups(self, username, repository_groups,
456 def _handle_user_repo_groups(self, username, repository_groups,
456 handle_mode=None):
457 handle_mode=None):
457 _superadmin = self.cls.get_first_super_admin()
458 _superadmin = self.cls.get_first_super_admin()
458 left_overs = True
459 left_overs = True
459
460
460 from rhodecode.model.repo_group import RepoGroupModel
461 from rhodecode.model.repo_group import RepoGroupModel
461
462
462 if handle_mode == 'detach':
463 if handle_mode == 'detach':
463 for r in repository_groups:
464 for r in repository_groups:
464 r.user = _superadmin
465 r.user = _superadmin
465 # set a description so we know why the super admin now owns
466 # set a description so we know why the super admin now owns
466 # additional repository groups that were orphaned !
467 # additional repository groups that were orphaned !
467 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
468 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
468 self.sa.add(r)
469 self.sa.add(r)
469 left_overs = False
470 left_overs = False
470 elif handle_mode == 'delete':
471 elif handle_mode == 'delete':
471 for r in repository_groups:
472 for r in repository_groups:
472 RepoGroupModel().delete(r)
473 RepoGroupModel().delete(r)
473 left_overs = False
474 left_overs = False
474
475
475 # if nothing was done we still have leftovers
476 # if nothing was done we still have leftovers
476 return left_overs
477 return left_overs
477
478
478 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
479 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
479 _superadmin = self.cls.get_first_super_admin()
480 _superadmin = self.cls.get_first_super_admin()
480 left_overs = True
481 left_overs = True
481
482
482 from rhodecode.model.user_group import UserGroupModel
483 from rhodecode.model.user_group import UserGroupModel
483
484
484 if handle_mode == 'detach':
485 if handle_mode == 'detach':
485 for r in user_groups:
486 for r in user_groups:
486 for user_user_group_to_perm in r.user_user_group_to_perm:
487 for user_user_group_to_perm in r.user_user_group_to_perm:
487 if user_user_group_to_perm.user.username == username:
488 if user_user_group_to_perm.user.username == username:
488 user_user_group_to_perm.user = _superadmin
489 user_user_group_to_perm.user = _superadmin
489 r.user = _superadmin
490 r.user = _superadmin
490 # set a description so we know why the super admin now owns
491 # set a description so we know why the super admin now owns
491 # additional user groups that were orphaned !
492 # additional user groups that were orphaned !
492 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
493 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
493 self.sa.add(r)
494 self.sa.add(r)
494 left_overs = False
495 left_overs = False
495 elif handle_mode == 'delete':
496 elif handle_mode == 'delete':
496 for r in user_groups:
497 for r in user_groups:
497 UserGroupModel().delete(r)
498 UserGroupModel().delete(r)
498 left_overs = False
499 left_overs = False
499
500
500 # if nothing was done we still have leftovers
501 # if nothing was done we still have leftovers
501 return left_overs
502 return left_overs
502
503
503 def delete(self, user, cur_user=None, handle_repos=None,
504 def delete(self, user, cur_user=None, handle_repos=None,
504 handle_repo_groups=None, handle_user_groups=None):
505 handle_repo_groups=None, handle_user_groups=None):
505 if not cur_user:
506 if not cur_user:
506 cur_user = getattr(
507 cur_user = getattr(
507 get_current_rhodecode_user(), 'username', None)
508 get_current_rhodecode_user(), 'username', None)
508 user = self._get_user(user)
509 user = self._get_user(user)
509
510
510 try:
511 try:
511 if user.username == User.DEFAULT_USER:
512 if user.username == User.DEFAULT_USER:
512 raise DefaultUserException(
513 raise DefaultUserException(
513 u"You can't remove this user since it's"
514 u"You can't remove this user since it's"
514 u" crucial for entire application")
515 u" crucial for entire application")
515
516
516 left_overs = self._handle_user_repos(
517 left_overs = self._handle_user_repos(
517 user.username, user.repositories, handle_repos)
518 user.username, user.repositories, handle_repos)
518 if left_overs and user.repositories:
519 if left_overs and user.repositories:
519 repos = [x.repo_name for x in user.repositories]
520 repos = [x.repo_name for x in user.repositories]
520 raise UserOwnsReposException(
521 raise UserOwnsReposException(
521 u'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
522 u'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
522 u'removed. Switch owners or remove those repositories:%(list_repos)s'
523 u'removed. Switch owners or remove those repositories:%(list_repos)s'
523 % {'username': user.username, 'len_repos': len(repos),
524 % {'username': user.username, 'len_repos': len(repos),
524 'list_repos': ', '.join(repos)})
525 'list_repos': ', '.join(repos)})
525
526
526 left_overs = self._handle_user_repo_groups(
527 left_overs = self._handle_user_repo_groups(
527 user.username, user.repository_groups, handle_repo_groups)
528 user.username, user.repository_groups, handle_repo_groups)
528 if left_overs and user.repository_groups:
529 if left_overs and user.repository_groups:
529 repo_groups = [x.group_name for x in user.repository_groups]
530 repo_groups = [x.group_name for x in user.repository_groups]
530 raise UserOwnsRepoGroupsException(
531 raise UserOwnsRepoGroupsException(
531 u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
532 u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
532 u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
533 u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
533 % {'username': user.username, 'len_repo_groups': len(repo_groups),
534 % {'username': user.username, 'len_repo_groups': len(repo_groups),
534 'list_repo_groups': ', '.join(repo_groups)})
535 'list_repo_groups': ', '.join(repo_groups)})
535
536
536 left_overs = self._handle_user_user_groups(
537 left_overs = self._handle_user_user_groups(
537 user.username, user.user_groups, handle_user_groups)
538 user.username, user.user_groups, handle_user_groups)
538 if left_overs and user.user_groups:
539 if left_overs and user.user_groups:
539 user_groups = [x.users_group_name for x in user.user_groups]
540 user_groups = [x.users_group_name for x in user.user_groups]
540 raise UserOwnsUserGroupsException(
541 raise UserOwnsUserGroupsException(
541 u'user "%s" still owns %s user groups and cannot be '
542 u'user "%s" still owns %s user groups and cannot be '
542 u'removed. Switch owners or remove those user groups:%s'
543 u'removed. Switch owners or remove those user groups:%s'
543 % (user.username, len(user_groups), ', '.join(user_groups)))
544 % (user.username, len(user_groups), ', '.join(user_groups)))
544
545
545 # we might change the user data with detach/delete, make sure
546 # we might change the user data with detach/delete, make sure
546 # the object is marked as expired before actually deleting !
547 # the object is marked as expired before actually deleting !
547 self.sa.expire(user)
548 self.sa.expire(user)
548 self.sa.delete(user)
549 self.sa.delete(user)
549 from rhodecode.lib.hooks_base import log_delete_user
550 from rhodecode.lib.hooks_base import log_delete_user
550 log_delete_user(deleted_by=cur_user, **user.get_dict())
551 log_delete_user(deleted_by=cur_user, **user.get_dict())
551 except Exception:
552 except Exception:
552 log.error(traceback.format_exc())
553 log.error(traceback.format_exc())
553 raise
554 raise
554
555
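A hedged sketch of removing a user while re-homing what they own; `user` is an already loaded User instance and the import paths are assumptions.

# Hedged sketch: delete a user, detaching owned repos/repo groups to the
# first super-admin and deleting owned user groups, per the handlers above.
from rhodecode.model.user import UserModel  # assumed path
from rhodecode.model.meta import Session

def remove_user(user):
    UserModel().delete(
        user,
        handle_repos='detach',
        handle_repo_groups='detach',
        handle_user_groups='delete',
    )
    Session().commit()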
555 def reset_password_link(self, data, pwd_reset_url):
556 def reset_password_link(self, data, pwd_reset_url):
556 from rhodecode.lib.celerylib import tasks, run_task
557 from rhodecode.lib.celerylib import tasks, run_task
557 from rhodecode.model.notification import EmailNotificationModel
558 from rhodecode.model.notification import EmailNotificationModel
558 user_email = data['email']
559 user_email = data['email']
559 try:
560 try:
560 user = User.get_by_email(user_email)
561 user = User.get_by_email(user_email)
561 if user:
562 if user:
562 log.debug('password reset user found %s', user)
563 log.debug('password reset user found %s', user)
563
564
564 email_kwargs = {
565 email_kwargs = {
565 'password_reset_url': pwd_reset_url,
566 'password_reset_url': pwd_reset_url,
566 'user': user,
567 'user': user,
567 'email': user_email,
568 'email': user_email,
568 'date': datetime.datetime.now()
569 'date': datetime.datetime.now()
569 }
570 }
570
571
571 (subject, headers, email_body,
572 (subject, headers, email_body,
572 email_body_plaintext) = EmailNotificationModel().render_email(
573 email_body_plaintext) = EmailNotificationModel().render_email(
573 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
574 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
574
575
575 recipients = [user_email]
576 recipients = [user_email]
576
577
577 action_logger_generic(
578 action_logger_generic(
578 'sending password reset email to user: {}'.format(
579 'sending password reset email to user: {}'.format(
579 user), namespace='security.password_reset')
580 user), namespace='security.password_reset')
580
581
581 run_task(tasks.send_email, recipients, subject,
582 run_task(tasks.send_email, recipients, subject,
582 email_body_plaintext, email_body)
583 email_body_plaintext, email_body)
583
584
584 else:
585 else:
585 log.debug("password reset email %s not found", user_email)
586 log.debug("password reset email %s not found", user_email)
586 except Exception:
587 except Exception:
587 log.error(traceback.format_exc())
588 log.error(traceback.format_exc())
588 return False
589 return False
589
590
590 return True
591 return True
591
592
    def reset_password(self, data):
        from rhodecode.lib.celerylib import tasks, run_task
        from rhodecode.model.notification import EmailNotificationModel
        from rhodecode.lib import auth
        user_email = data['email']
        pre_db = True
        try:
            user = User.get_by_email(user_email)
            new_passwd = auth.PasswordGenerator().gen_password(
                12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
            if user:
                user.password = auth.get_crypt_password(new_passwd)
                # also force this user to reset his password !
                user.update_userdata(force_password_change=True)

                Session().add(user)

                # now delete the token in question
                UserApiKeys = AuthTokenModel.cls
                UserApiKeys().query().filter(
                    UserApiKeys.api_key == data['token']).delete()

                Session().commit()
                log.info('successfully reset password for `%s`', user_email)

            if new_passwd is None:
                raise Exception('unable to generate new password')

            pre_db = False

            email_kwargs = {
                'new_password': new_passwd,
                'user': user,
                'email': user_email,
                'date': datetime.datetime.now()
            }

            (subject, headers, email_body,
             email_body_plaintext) = EmailNotificationModel().render_email(
                EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
                **email_kwargs)

            recipients = [user_email]

            action_logger_generic(
                'sent new password to user: {} with email: {}'.format(
                    user, user_email), namespace='security.password_reset')

            run_task(tasks.send_email, recipients, subject,
                     email_body_plaintext, email_body)

        except Exception:
            log.error('Failed to update user password')
            log.error(traceback.format_exc())
            if pre_db:
                # we roll back only if the local db operations fail; once
                # execution reaches run_task we are past the point where a
                # rollback would help
                Session().rollback()

        return True

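A small invocation sketch for reset_password; the email and token values are placeholders, but the two dict keys ('email' and 'token') are exactly the ones the method reads above.

# hypothetical invocation with placeholder values
one_time_token = 'token-from-the-reset-link'
UserModel().reset_password({
    'email': 'joe@example.com',
    'token': one_time_token,  # this auth token is deleted once the reset succeeds
})
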
    def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
        """
        Fetches auth_user by user_id, api_key or username if present,
        and fills auth_user attributes with those taken from the database.
        Additionally sets is_authenticated to False if the lookup fails or
        errors out.

        :param auth_user: instance of user to set attributes
        :param user_id: user id to fetch by
        :param api_key: api key to fetch by
        :param username: username to fetch by
        """
        if user_id is None and api_key is None and username is None:
            raise Exception('You need to pass user_id, api_key or username')

        log.debug(
            'AuthUser: fill data execution based on: '
            'user_id:%s api_key:%s username:%s', user_id, api_key, username)
        try:
            dbuser = None
            if user_id:
                dbuser = self.get(user_id)
            elif api_key:
                dbuser = self.get_by_auth_token(api_key)
            elif username:
                dbuser = self.get_by_username(username)

            if not dbuser:
                log.warning(
                    'Unable to lookup user by id:%s api_key:%s username:%s',
                    user_id, api_key, username)
                return False
            if not dbuser.active:
                log.debug('User `%s:%s` is inactive, skipping fill data',
                          username, user_id)
                return False

            log.debug('AuthUser: filling found user:%s data', dbuser)
            user_data = dbuser.get_dict()

            user_data.update({
                # explicitly set the safe, escaped values
                'first_name': dbuser.first_name,
                'last_name': dbuser.last_name,
            })

            for k, v in user_data.items():
                # properties of auth user we don't update
                if k not in ['auth_tokens', 'permissions']:
                    setattr(auth_user, k, v)

        except Exception:
            log.error(traceback.format_exc())
            auth_user.is_authenticated = False
            return False

        return True

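A usage sketch for fill_data; the helper below is hypothetical and only relies on the lookup precedence (user_id, then api_key, then username) and the boolean return visible above.

def load_auth_user(model, auth_user, username):
    # hypothetical helper: pass exactly one identifier; the first
    # non-empty one of user_id / api_key / username drives the lookup
    if not model.fill_data(auth_user, username=username):
        # not found, inactive, or an error (is_authenticated may be
        # forced to False by the except branch above)
        return None
    return auth_user
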
    def has_perm(self, user, perm):
        perm = self._get_perm(perm)
        user = self._get_user(user)

        return UserToPerm.query().filter(UserToPerm.user == user)\
            .filter(UserToPerm.permission == perm).scalar() is not None

    def grant_perm(self, user, perm):
        """
        Grant a user a global permission

        :param user:
        :param perm:
        """
        user = self._get_user(user)
        perm = self._get_perm(perm)
        # if this permission is already granted, skip it
        _perm = UserToPerm.query()\
            .filter(UserToPerm.user == user)\
            .filter(UserToPerm.permission == perm)\
            .scalar()
        if _perm:
            return
        new = UserToPerm()
        new.user = user
        new.permission = perm
        self.sa.add(new)
        return new

    def revoke_perm(self, user, perm):
        """
        Revoke a user's global permission

        :param user:
        :param perm:
        """
        user = self._get_user(user)
        perm = self._get_perm(perm)

        obj = UserToPerm.query()\
            .filter(UserToPerm.user == user)\
            .filter(UserToPerm.permission == perm)\
            .scalar()
        if obj:
            self.sa.delete(obj)

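The three permission helpers compose into a grant/check/revoke round trip, sketched below with example values; the username, the 'hg.create.repository' key and the UserModel/User lookups are assumptions, and the explicit commits reflect that the helpers only stage changes on the session.

# illustrative round trip, example values only
model = UserModel()
joe = User.get_by_username('joe')  # assumed existing account

model.grant_perm(joe, 'hg.create.repository')  # no-op if already granted
Session().commit()

assert model.has_perm(joe, 'hg.create.repository')

model.revoke_perm(joe, 'hg.create.repository')
Session().commit()
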
    def add_extra_email(self, user, email):
        """
        Adds email address to UserEmailMap

        :param user:
        :param email:
        """

        user = self._get_user(user)

        obj = UserEmailMap()
        obj.user = user
        obj.email = email
        self.sa.add(obj)
        return obj

    def delete_extra_email(self, user, email_id):
        """
        Removes email address from UserEmailMap

        :param user:
        :param email_id:
        """
        user = self._get_user(user)
        obj = UserEmailMap.query().get(email_id)
        if obj and obj.user_id == user.user_id:
            self.sa.delete(obj)

    def parse_ip_range(self, ip_range):
        ip_list = []

        def make_unique(value):
            seen = []
            return [c for c in value if not (c in seen or seen.append(c))]

        # first, split by commas
        for ip_range in ip_range.split(','):
            if not ip_range:
                continue
            ip_range = ip_range.strip()
            if '-' in ip_range:
                start_ip, end_ip = ip_range.split('-', 1)
                start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
                end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
                parsed_ip_range = []

                for index in xrange(int(start_ip), int(end_ip) + 1):
                    new_ip = ipaddress.ip_address(index)
                    parsed_ip_range.append(str(new_ip))
                ip_list.extend(parsed_ip_range)
            else:
                ip_list.append(ip_range)

        return make_unique(ip_list)

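A concrete example of what parse_ip_range produces (this module is Python 2 code, note the xrange call); the addresses are invented, and the expansion plus de-duplication follow directly from the loop above.

model = UserModel()  # assumed instance of this model class
ips = model.parse_ip_range('127.0.0.1, 10.0.0.1-10.0.0.3, 127.0.0.1')
# dash ranges are expanded, duplicates dropped, input order kept:
# ['127.0.0.1', '10.0.0.1', '10.0.0.2', '10.0.0.3']
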
    def add_extra_ip(self, user, ip, description=None):
        """
        Adds ip address to UserIpMap

        :param user:
        :param ip:
        """

        user = self._get_user(user)
        obj = UserIpMap()
        obj.user = user
        obj.ip_addr = ip
        obj.description = description
        self.sa.add(obj)
        return obj

    def delete_extra_ip(self, user, ip_id):
        """
        Removes ip address from UserIpMap

        :param user:
        :param ip_id:
        """
        user = self._get_user(user)
        obj = UserIpMap.query().get(ip_id)
        if obj and obj.user_id == user.user_id:
            self.sa.delete(obj)

    def get_accounts_in_creation_order(self, current_user=None):
        """
        Get accounts in order of creation, for deactivation under license
        limits.

        Pick the currently logged-in user and put it at position 0, then
        append all super-admins in order of creation date, then append all
        other active accounts in order of creation.

        Based on that list, the trailing accounts can be disabled: they were
        created last, and the head of the list always holds the super-admins
        and the current user.

        :param current_user: optionally the current user running this operation
        """

        if not current_user:
            current_user = get_current_rhodecode_user()
        active_super_admins = [
            x.user_id for x in User.query()
            .filter(User.user_id != current_user.user_id)
            .filter(User.active == true())
            .filter(User.admin == true())
            .order_by(User.created_on.asc())]

        active_regular_users = [
            x.user_id for x in User.query()
            .filter(User.user_id != current_user.user_id)
            .filter(User.active == true())
            .filter(User.admin == false())
            .order_by(User.created_on.asc())]

        list_of_accounts = [current_user.user_id]
        list_of_accounts += active_super_admins
        list_of_accounts += active_regular_users

        return list_of_accounts

    def deactivate_last_users(self, expected_users, current_user=None):
        """
        Deactivate accounts that are over the license limits.
        Which accounts get disabled follows this ordering:

        the current user, then super-admins in creation order, then regular
        active users in creation order.

        Using that list, we mark accounts from the end of it as inactive,
        so only the most recently created accounts get blocked.

        :param expected_users: the number of accounts expected by the
            license; trailing accounts in the ordered list beyond this
            limit are deactivated
        """

        list_of_accounts = self.get_accounts_in_creation_order(
            current_user=current_user)

        for acc_id in list_of_accounts[expected_users + 1:]:
            user = User.get(acc_id)
            log.info('Deactivating account %s for license unlock', user)
            user.active = False
            Session().add(user)
            Session().commit()

        return

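A worked illustration of the slice in deactivate_last_users, with invented account ids in the order produced by get_accounts_in_creation_order; only entries from index expected_users + 1 onward are touched, so deactivation always starts from the most recently created accounts.

# invented ids: [current_user, admin_1, admin_2, user_1, user_2, user_3]
ordered_ids = [7, 1, 4, 2, 3, 5]
expected_users = 4

to_deactivate = ordered_ids[expected_users + 1:]
# to_deactivate == [5]: only the newest regular account is disabled;
# the current user and super-admins sit at the front and are never cut
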
    def get_user_log(self, user, filter_term):
        user_log = UserLog.query()\
            .filter(or_(UserLog.user_id == user.user_id,
                        UserLog.username == user.username))\
            .options(joinedload(UserLog.user))\
            .options(joinedload(UserLog.repository))\
            .order_by(UserLog.action_date.desc())

        user_log = user_log_filter(user_log, filter_term)
        return user_log

@@ -1,34 +1,35 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2016-2017 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import colander
from rhodecode.model.validation_schema import validators, preparers, types


class ReviewerSchema(colander.MappingSchema):
    username = colander.SchemaNode(types.StrOrIntType())
    reasons = colander.SchemaNode(colander.List(), missing=['no reason specified'])
    mandatory = colander.SchemaNode(colander.Boolean(), missing=False)
    rules = colander.SchemaNode(colander.List(), missing=[])


class ReviewerListSchema(colander.SequenceSchema):
    reviewers = ReviewerSchema()

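A deserialization sketch for the extended ReviewerSchema; the payload values (including rule id 42) are invented, and the fallbacks assume colander's usual handling of missing keys, where each absent field takes the missing= default declared above, so older payloads without 'rules' still validate to an empty list.

schema = ReviewerListSchema()
reviewers = schema.deserialize([
    {'username': 'joe', 'mandatory': True, 'rules': [42]},  # 42 is an example rule id
    {'username': 'anna'},  # no reasons/mandatory/rules supplied
])
# reviewers[1]['rules'] == [] and
# reviewers[1]['reasons'] == ['no reason specified']
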
NO CONTENT: modified file (the requested file is too big and content was truncated)
NO CONTENT: modified file (the requested file is too big and content was truncated)
NO CONTENT: modified file (the requested file is too big and content was truncated)
NO CONTENT: modified file (the requested file is too big and content was truncated)
NO CONTENT: modified file (the requested file is too big and content was truncated)
NO CONTENT: modified file (the requested file is too big and content was truncated)
NO CONTENT: modified file (the requested file is too big and content was truncated)
NO CONTENT: modified file (the requested file is too big and content was truncated)
General Comments 0